Oct 03 13:31:24 crc systemd[1]: Starting Kubernetes Kubelet...
Oct 03 13:31:24 crc restorecon[4570]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Oct 03 13:31:24 crc restorecon[4570]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as customized by admin to system_u:object_r:container_file_t:s0:c442,c857
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Oct 03 13:31:25 crc restorecon[4570]:
/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: 
/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 03 13:31:25 crc 
restorecon[4570]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 03 13:31:25 crc restorecon[4570]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 03 13:31:25 crc restorecon[4570]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 03 13:31:25 crc 
restorecon[4570]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc 
restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc 
restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 03 13:31:25 
crc restorecon[4570]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 03 
13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 03 13:31:25 crc restorecon[4570]: 
/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Oct 03 13:31:25 crc restorecon[4570]: 
/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 13:31:25 crc restorecon[4570]:
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 
13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 13:31:25 crc 
restorecon[4570]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 03 13:31:25 crc restorecon[4570]: 
/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917 Oct 03 13:31:25 crc restorecon[4570]: 
/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 03 13:31:25 crc restorecon[4570]: 
/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c37,c572 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 03 13:31:25 crc restorecon[4570]: 
/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 
13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: 
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 
13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc 
restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c133,c223 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 03 13:31:25 crc restorecon[4570]: 
/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c682,c947 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 03 13:31:25 crc restorecon[4570]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 03 13:31:25 crc restorecon[4570]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0 Oct 03 13:31:26 crc kubenswrapper[4861]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Oct 03 13:31:26 crc kubenswrapper[4861]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version. Oct 03 13:31:26 crc kubenswrapper[4861]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Oct 03 13:31:26 crc kubenswrapper[4861]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. 
Oct 03 13:31:26 crc kubenswrapper[4861]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI. Oct 03 13:31:26 crc kubenswrapper[4861]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.463035 4861 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime" Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.467340 4861 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.467361 4861 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.467366 4861 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.467371 4861 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.467375 4861 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.467380 4861 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.467385 4861 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.467390 4861 feature_gate.go:330] unrecognized feature gate: OVNObservability Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.467395 4861 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.467399 4861 feature_gate.go:330] unrecognized feature gate: SignatureStores Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.467403 4861 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.467408 4861 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.467412 4861 feature_gate.go:330] unrecognized feature gate: Example Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.467416 4861 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.467419 4861 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.467423 4861 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.467426 4861 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.467431 4861 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
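The deprecation warnings above all point at the same migration: each of these kubelet flags now has a first-class field in the KubeletConfiguration file passed via --config. A minimal sketch of the equivalent config follows; the kubelet.config.k8s.io/v1beta1 field names are real, but the socket path, taint, reservation, and eviction values are illustrative placeholders, not values read from this log:

    apiVersion: kubelet.config.k8s.io/v1beta1
    kind: KubeletConfiguration
    # replaces --container-runtime-endpoint (placeholder CRI-O socket)
    containerRuntimeEndpoint: unix:///var/run/crio/crio.sock
    # replaces --volume-plugin-dir
    volumePluginDir: /etc/kubernetes/kubelet-plugins/volume/exec
    # replaces --register-with-taints (placeholder taint)
    registerWithTaints:
    - key: node-role.kubernetes.io/master
      effect: NoSchedule
    # replaces --system-reserved (placeholder reservations)
    systemReserved:
      cpu: 500m
      memory: 1Gi
    # --minimum-container-ttl-duration is superseded by eviction settings
    evictionHard:
      memory.available: 100Mi

--pod-infra-container-image is the exception: it has no config-file replacement. As the server.go:211 line above notes, the sandbox (pause) image is now owned by the CRI runtime (e.g. CRI-O's pause_image setting), so the flag survives only so the image garbage collector does not prune that image.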
Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.467436 4861 feature_gate.go:330] unrecognized feature gate: GatewayAPI Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.467440 4861 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.467445 4861 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.467449 4861 feature_gate.go:330] unrecognized feature gate: InsightsConfig Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.467454 4861 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.467458 4861 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.467463 4861 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.467473 4861 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.467477 4861 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.467481 4861 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.467484 4861 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.467487 4861 feature_gate.go:330] unrecognized feature gate: PlatformOperators Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.467491 4861 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.467494 4861 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.467497 4861 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.467501 4861 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.467504 4861 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.467508 4861 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.467514 4861 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.467518 4861 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.467522 4861 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.467526 4861 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.467531 4861 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.467535 4861 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.467538 4861 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.467542 4861 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.467546 4861 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.467550 4861 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.467553 4861 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.467557 4861 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.467561 4861 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.467565 4861 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.467568 4861 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.467572 4861 feature_gate.go:330] unrecognized feature gate: NewOLM Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.467575 4861 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.467579 4861 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.467582 4861 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.467586 4861 feature_gate.go:330] unrecognized feature gate: PinnedImages Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.467589 4861 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.467593 4861 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.467596 4861 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.467599 4861 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.467605 4861 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.467609 4861 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.467614 4861 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.467618 4861 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.467623 4861 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.467628 4861 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.467632 4861 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.467636 4861 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.467640 4861 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.467644 4861 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.467648 4861 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.467741 4861 flags.go:64] FLAG: --address="0.0.0.0" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.467752 4861 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.467759 4861 flags.go:64] FLAG: --anonymous-auth="true" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.467764 4861 flags.go:64] FLAG: --application-metrics-count-limit="100" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.467770 4861 flags.go:64] FLAG: --authentication-token-webhook="false" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.467776 4861 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.467782 4861 flags.go:64] FLAG: --authorization-mode="AlwaysAllow" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.467789 4861 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.467793 4861 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.467797 4861 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.467802 4861 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.467806 4861 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.467811 4861 flags.go:64] FLAG: --cgroup-driver="cgroupfs" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.467815 4861 flags.go:64] FLAG: --cgroup-root="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.467819 4861 flags.go:64] FLAG: --cgroups-per-qos="true" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.467823 4861 flags.go:64] FLAG: --client-ca-file="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.467827 4861 flags.go:64] FLAG: --cloud-config="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.467831 4861 flags.go:64] FLAG: --cloud-provider="" Oct 03 13:31:26 
crc kubenswrapper[4861]: I1003 13:31:26.467835 4861 flags.go:64] FLAG: --cluster-dns="[]" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.467841 4861 flags.go:64] FLAG: --cluster-domain="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.467845 4861 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.467852 4861 flags.go:64] FLAG: --config-dir="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.467857 4861 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.467863 4861 flags.go:64] FLAG: --container-log-max-files="5" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.467869 4861 flags.go:64] FLAG: --container-log-max-size="10Mi" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.467873 4861 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.467877 4861 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.467882 4861 flags.go:64] FLAG: --containerd-namespace="k8s.io" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.467886 4861 flags.go:64] FLAG: --contention-profiling="false" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.467891 4861 flags.go:64] FLAG: --cpu-cfs-quota="true" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.467895 4861 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.467899 4861 flags.go:64] FLAG: --cpu-manager-policy="none" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.467903 4861 flags.go:64] FLAG: --cpu-manager-policy-options="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.467909 4861 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.467913 4861 flags.go:64] FLAG: --enable-controller-attach-detach="true" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.467918 4861 flags.go:64] FLAG: --enable-debugging-handlers="true" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.467922 4861 flags.go:64] FLAG: --enable-load-reader="false" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.467927 4861 flags.go:64] FLAG: --enable-server="true" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.467931 4861 flags.go:64] FLAG: --enforce-node-allocatable="[pods]" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.467936 4861 flags.go:64] FLAG: --event-burst="100" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.467940 4861 flags.go:64] FLAG: --event-qps="50" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.467944 4861 flags.go:64] FLAG: --event-storage-age-limit="default=0" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.467948 4861 flags.go:64] FLAG: --event-storage-event-limit="default=0" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.467953 4861 flags.go:64] FLAG: --eviction-hard="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.467959 4861 flags.go:64] FLAG: --eviction-max-pod-grace-period="0" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.467963 4861 flags.go:64] FLAG: --eviction-minimum-reclaim="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.467967 4861 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.467971 4861 flags.go:64] FLAG: --eviction-soft="" Oct 
03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.467975 4861 flags.go:64] FLAG: --eviction-soft-grace-period="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.467979 4861 flags.go:64] FLAG: --exit-on-lock-contention="false" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.467983 4861 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.467988 4861 flags.go:64] FLAG: --experimental-mounter-path="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.467992 4861 flags.go:64] FLAG: --fail-cgroupv1="false" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.468002 4861 flags.go:64] FLAG: --fail-swap-on="true" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.468006 4861 flags.go:64] FLAG: --feature-gates="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.468011 4861 flags.go:64] FLAG: --file-check-frequency="20s" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.468015 4861 flags.go:64] FLAG: --global-housekeeping-interval="1m0s" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.468019 4861 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.468024 4861 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.468028 4861 flags.go:64] FLAG: --healthz-port="10248" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.468032 4861 flags.go:64] FLAG: --help="false" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.468037 4861 flags.go:64] FLAG: --hostname-override="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.468040 4861 flags.go:64] FLAG: --housekeeping-interval="10s" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.468045 4861 flags.go:64] FLAG: --http-check-frequency="20s" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.468049 4861 flags.go:64] FLAG: --image-credential-provider-bin-dir="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.468053 4861 flags.go:64] FLAG: --image-credential-provider-config="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.468057 4861 flags.go:64] FLAG: --image-gc-high-threshold="85" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.468061 4861 flags.go:64] FLAG: --image-gc-low-threshold="80" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.468065 4861 flags.go:64] FLAG: --image-service-endpoint="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.468069 4861 flags.go:64] FLAG: --kernel-memcg-notification="false" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.468073 4861 flags.go:64] FLAG: --kube-api-burst="100" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.468077 4861 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.468081 4861 flags.go:64] FLAG: --kube-api-qps="50" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.468085 4861 flags.go:64] FLAG: --kube-reserved="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.468089 4861 flags.go:64] FLAG: --kube-reserved-cgroup="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.468093 4861 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.468097 4861 flags.go:64] FLAG: --kubelet-cgroups="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.468101 4861 flags.go:64] FLAG: --local-storage-capacity-isolation="true" Oct 03 13:31:26 crc 
kubenswrapper[4861]: I1003 13:31:26.468105 4861 flags.go:64] FLAG: --lock-file="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.468110 4861 flags.go:64] FLAG: --log-cadvisor-usage="false" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.468114 4861 flags.go:64] FLAG: --log-flush-frequency="5s" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.468118 4861 flags.go:64] FLAG: --log-json-info-buffer-size="0" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.468124 4861 flags.go:64] FLAG: --log-json-split-stream="false" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.468128 4861 flags.go:64] FLAG: --log-text-info-buffer-size="0" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.468132 4861 flags.go:64] FLAG: --log-text-split-stream="false" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.468138 4861 flags.go:64] FLAG: --logging-format="text" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.468142 4861 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.468146 4861 flags.go:64] FLAG: --make-iptables-util-chains="true" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.468151 4861 flags.go:64] FLAG: --manifest-url="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.468155 4861 flags.go:64] FLAG: --manifest-url-header="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.468160 4861 flags.go:64] FLAG: --max-housekeeping-interval="15s" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.468164 4861 flags.go:64] FLAG: --max-open-files="1000000" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.468169 4861 flags.go:64] FLAG: --max-pods="110" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.468173 4861 flags.go:64] FLAG: --maximum-dead-containers="-1" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.468178 4861 flags.go:64] FLAG: --maximum-dead-containers-per-container="1" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.468181 4861 flags.go:64] FLAG: --memory-manager-policy="None" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.468185 4861 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.468190 4861 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.468194 4861 flags.go:64] FLAG: --node-ip="192.168.126.11" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.468198 4861 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.468208 4861 flags.go:64] FLAG: --node-status-max-images="50" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.468212 4861 flags.go:64] FLAG: --node-status-update-frequency="10s" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.468216 4861 flags.go:64] FLAG: --oom-score-adj="-999" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.468221 4861 flags.go:64] FLAG: --pod-cidr="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.468225 4861 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.468249 4861 flags.go:64] FLAG: --pod-manifest-path="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.468253 4861 flags.go:64] FLAG: 
--pod-max-pids="-1" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.468257 4861 flags.go:64] FLAG: --pods-per-core="0" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.468261 4861 flags.go:64] FLAG: --port="10250" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.468266 4861 flags.go:64] FLAG: --protect-kernel-defaults="false" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.468270 4861 flags.go:64] FLAG: --provider-id="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.468275 4861 flags.go:64] FLAG: --qos-reserved="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.468279 4861 flags.go:64] FLAG: --read-only-port="10255" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.468283 4861 flags.go:64] FLAG: --register-node="true" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.468287 4861 flags.go:64] FLAG: --register-schedulable="true" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.468292 4861 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.468299 4861 flags.go:64] FLAG: --registry-burst="10" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.468305 4861 flags.go:64] FLAG: --registry-qps="5" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.468309 4861 flags.go:64] FLAG: --reserved-cpus="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.468313 4861 flags.go:64] FLAG: --reserved-memory="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.468318 4861 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.468322 4861 flags.go:64] FLAG: --root-dir="/var/lib/kubelet" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.468326 4861 flags.go:64] FLAG: --rotate-certificates="false" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.468330 4861 flags.go:64] FLAG: --rotate-server-certificates="false" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.468334 4861 flags.go:64] FLAG: --runonce="false" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.468338 4861 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.468343 4861 flags.go:64] FLAG: --runtime-request-timeout="2m0s" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.468347 4861 flags.go:64] FLAG: --seccomp-default="false" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.468351 4861 flags.go:64] FLAG: --serialize-image-pulls="true" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.468355 4861 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.468359 4861 flags.go:64] FLAG: --storage-driver-db="cadvisor" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.468364 4861 flags.go:64] FLAG: --storage-driver-host="localhost:8086" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.468368 4861 flags.go:64] FLAG: --storage-driver-password="root" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.468372 4861 flags.go:64] FLAG: --storage-driver-secure="false" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.468377 4861 flags.go:64] FLAG: --storage-driver-table="stats" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.468382 4861 flags.go:64] FLAG: --storage-driver-user="root" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.468386 4861 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s" Oct 03 13:31:26 crc 
kubenswrapper[4861]: I1003 13:31:26.468391 4861 flags.go:64] FLAG: --sync-frequency="1m0s" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.468395 4861 flags.go:64] FLAG: --system-cgroups="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.468399 4861 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.468406 4861 flags.go:64] FLAG: --system-reserved-cgroup="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.468410 4861 flags.go:64] FLAG: --tls-cert-file="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.468414 4861 flags.go:64] FLAG: --tls-cipher-suites="[]" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.468419 4861 flags.go:64] FLAG: --tls-min-version="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.468424 4861 flags.go:64] FLAG: --tls-private-key-file="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.468428 4861 flags.go:64] FLAG: --topology-manager-policy="none" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.468432 4861 flags.go:64] FLAG: --topology-manager-policy-options="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.468436 4861 flags.go:64] FLAG: --topology-manager-scope="container" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.468440 4861 flags.go:64] FLAG: --v="2" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.468449 4861 flags.go:64] FLAG: --version="false" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.468455 4861 flags.go:64] FLAG: --vmodule="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.468460 4861 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.468465 4861 flags.go:64] FLAG: --volume-stats-agg-period="1m0s" Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.468570 4861 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.468576 4861 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.468581 4861 feature_gate.go:330] unrecognized feature gate: GatewayAPI Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.468586 4861 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.468591 4861 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.468595 4861 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.468600 4861 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.468604 4861 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.468608 4861 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.468611 4861 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.468615 4861 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.468618 4861 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 
13:31:26.468622 4861 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.468626 4861 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.468629 4861 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.468633 4861 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.468636 4861 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.468640 4861 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.468644 4861 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.468647 4861 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.468651 4861 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.468654 4861 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.468658 4861 feature_gate.go:330] unrecognized feature gate: PlatformOperators Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.468662 4861 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.468665 4861 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.468668 4861 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.468672 4861 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.468675 4861 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.468680 4861 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.468684 4861 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.468687 4861 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.468691 4861 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.468694 4861 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.468697 4861 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.468703 4861 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.468707 4861 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.468711 4861 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.468716 4861 feature_gate.go:330] unrecognized feature gate: NewOLM Oct 03 
13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.468720 4861 feature_gate.go:330] unrecognized feature gate: PinnedImages Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.468725 4861 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.468730 4861 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.468734 4861 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.468737 4861 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.468741 4861 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.468745 4861 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.468749 4861 feature_gate.go:330] unrecognized feature gate: InsightsConfig Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.468752 4861 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.468755 4861 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.468760 4861 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.468763 4861 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.468767 4861 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.468770 4861 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.468774 4861 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.468778 4861 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.468783 4861 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.468787 4861 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.468791 4861 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.468794 4861 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.468798 4861 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.468803 4861 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.468806 4861 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.468810 4861 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.468813 4861 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.468818 4861 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.468822 4861 feature_gate.go:330] unrecognized feature gate: Example Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.468826 4861 feature_gate.go:330] unrecognized feature gate: OVNObservability Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.468830 4861 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.468834 4861 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.468839 4861 feature_gate.go:330] unrecognized feature gate: SignatureStores Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.468844 4861 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.468854 4861 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
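[editor's note] Each feature-gate parse pass re-logs the full set of OpenShift-side gate names that the embedded Kubernetes gate registry does not know, which is why the same warnings repeat in blocks throughout this startup. A hypothetical triage sketch in Python; both regexes assume the exact record formats shown above:

import re
from collections import Counter

# Count how often each unrecognized gate is warned about, and fold the
# FLAG dump into a dict; formats are assumed from the records above.
GATE_RE = re.compile(r"unrecognized feature gate: (\S+)")
FLAG_RE = re.compile(r'FLAG: (--[\w-]+)="([^"]*)"')

def triage(log_text: str) -> tuple[Counter, dict]:
    return Counter(GATE_RE.findall(log_text)), dict(FLAG_RE.findall(log_text))

Run over this excerpt, a gate such as GatewayAPI would count once per parse pass, and flags["--config"] would come back as "/etc/kubernetes/kubelet.conf".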
Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.468862 4861 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.475838 4861 server.go:491] "Kubelet version" kubeletVersion="v1.31.5" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.475859 4861 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK="" Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.475916 4861 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.475922 4861 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.475926 4861 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.475930 4861 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.475934 4861 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.475937 4861 feature_gate.go:330] unrecognized feature gate: OVNObservability Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.475941 4861 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.475944 4861 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.475948 4861 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.475952 4861 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.475957 4861 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
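[editor's note] The resolved "feature gates: {map[...]}" record above is Go's fmt rendering of the effective gate set. A small sketch to turn that dump into a Python dict; it assumes boolean values only, as in this record:

import re

def parse_feature_gates(record: str) -> dict[str, bool]:
    # Extract "Name:bool" pairs from a Go-style "{map[...]}" dump.
    body = re.search(r"map\[(.*?)\]", record)
    if not body:
        return {}
    pairs = (item.split(":", 1) for item in body.group(1).split())
    return {name: value == "true" for name, value in pairs}

For the record above, parse_feature_gates(...)["ValidatingAdmissionPolicy"] is True and ...["NodeSwap"] is False.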
Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.475963 4861 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.475968 4861 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.475971 4861 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.475975 4861 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.475979 4861 feature_gate.go:330] unrecognized feature gate: SignatureStores Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.475982 4861 feature_gate.go:330] unrecognized feature gate: Example Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.475985 4861 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.475989 4861 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.475992 4861 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.475996 4861 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476000 4861 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476003 4861 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476006 4861 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476010 4861 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476013 4861 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476017 4861 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476021 4861 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476024 4861 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476027 4861 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476031 4861 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476035 4861 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476038 4861 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476042 4861 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476046 4861 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476050 4861 feature_gate.go:330] unrecognized feature gate: PlatformOperators Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476053 4861 feature_gate.go:330] unrecognized 
feature gate: GCPLabelsTags Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476058 4861 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476062 4861 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476067 4861 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476071 4861 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476075 4861 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476079 4861 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476083 4861 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476087 4861 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476090 4861 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476094 4861 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476098 4861 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476103 4861 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476107 4861 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476111 4861 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476115 4861 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476118 4861 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476122 4861 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476126 4861 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476129 4861 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476133 4861 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476136 4861 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476140 4861 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476143 4861 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476147 4861 feature_gate.go:330] unrecognized feature gate: PinnedImages Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476150 4861 feature_gate.go:330] unrecognized feature gate: InsightsConfig Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476154 4861 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476157 4861 feature_gate.go:330] unrecognized feature gate: NewOLM Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476161 4861 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476164 4861 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476168 4861 feature_gate.go:330] unrecognized feature gate: GatewayAPI Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476171 4861 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476175 4861 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476178 4861 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476182 4861 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.476188 4861 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Oct 03 
13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476316 4861 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476330 4861 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476334 4861 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476339 4861 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476343 4861 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476347 4861 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476350 4861 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476355 4861 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476359 4861 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476363 4861 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476368 4861 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476371 4861 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476375 4861 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476378 4861 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476381 4861 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476386 4861 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476391 4861 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476395 4861 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476398 4861 feature_gate.go:330] unrecognized feature gate: PinnedImages Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476402 4861 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476406 4861 feature_gate.go:330] unrecognized feature gate: GatewayAPI Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476410 4861 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476413 4861 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476417 4861 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476421 4861 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476424 4861 feature_gate.go:330] unrecognized feature gate: InsightsConfig Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476428 4861 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476431 4861 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476435 4861 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476438 4861 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476442 4861 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476445 4861 feature_gate.go:330] unrecognized feature gate: Example Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476448 4861 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476452 4861 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476456 4861 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476459 4861 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476463 4861 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476466 4861 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476470 4861 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476473 4861 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476477 4861 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476481 4861 feature_gate.go:330] unrecognized feature gate: 
AWSEFSDriverVolumeMetrics Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476485 4861 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476488 4861 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476491 4861 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476496 4861 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476500 4861 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476504 4861 feature_gate.go:330] unrecognized feature gate: SignatureStores Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476508 4861 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476512 4861 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476515 4861 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476519 4861 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476523 4861 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476527 4861 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476530 4861 feature_gate.go:330] unrecognized feature gate: PlatformOperators Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476534 4861 feature_gate.go:330] unrecognized feature gate: OVNObservability Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476538 4861 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476541 4861 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476544 4861 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476548 4861 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476551 4861 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476555 4861 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476558 4861 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476562 4861 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476565 4861 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476569 4861 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476572 4861 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Oct 03 13:31:26 crc 
kubenswrapper[4861]: W1003 13:31:26.476575 4861 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476579 4861 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476582 4861 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.476586 4861 feature_gate.go:330] unrecognized feature gate: NewOLM Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.476592 4861 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.476754 4861 server.go:940] "Client rotation is on, will bootstrap in background" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.480689 4861 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.480785 4861 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem". Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.482330 4861 server.go:997] "Starting client certificate rotation" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.482357 4861 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.482571 4861 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2026-01-11 03:01:46.226404835 +0000 UTC Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.482737 4861 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 2389h30m19.743670788s for next certificate rotation Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.517896 4861 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.523135 4861 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.542421 4861 log.go:25] "Validated CRI v1 runtime API" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.576332 4861 log.go:25] "Validated CRI v1 image API" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.578196 4861 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.583739 4861 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2025-10-03-13-25-20-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3] Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.583767 4861 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} 
Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.583767 4861 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:42 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:43 fsType:tmpfs blockSize:0}]
Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.598042 4861 manager.go:217] Machine: {Timestamp:2025-10-03 13:31:26.595664407 +0000 UTC m=+0.593649474 CPUVendorID:AuthenticAMD NumCores:8 NumPhysicalCores:1 NumSockets:8 CpuFrequency:2799998 MemoryCapacity:25199480832 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:5c5136c5-33d2-4bef-9fd7-5251914e4451 BootID:9c733c76-1447-4e10-91a4-f1aaa7de6132 Filesystems:[{Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:5039898624 Type:vfs Inodes:819200 HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:12599742464 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:/run/user/1000 DeviceMajor:0 DeviceMinor:42 Capacity:2519945216 Type:vfs Inodes:615221 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:43 Capacity:1073741824 Type:vfs Inodes:3076108 HasInodes:true} {Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:12599738368 Type:vfs Inodes:3076108 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:429496729600 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:7c:c6:e0 Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:7c:c6:e0 Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:d1:ca:ad Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:19:d4:75 Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:56:62:3b Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:67:f1:8a Speed:-1 Mtu:1496} {Name:eth10 MacAddress:1e:38:6c:3f:f9:1e Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:6e:56:9d:6e:b0:bd Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:25199480832 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None}
Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.598248 4861 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available.
Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.598453 4861 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:}
Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.599668 4861 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority"
Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.599858 4861 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[]
Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.599892 4861 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2}
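The nodeConfig dump above carries the node's resource-protection policy: SystemReserved holds back 200m CPU, 350Mi memory and 350Mi ephemeral storage, and HardEvictionThresholds trigger eviction when, for example, memory.available drops below 100Mi or nodefs.available below 10%. A minimal sketch of how such a threshold is evaluated, assuming the usual signal definition (available = capacity - usage); the types and names here are illustrative, not the kubelet's internals:

package main

import "fmt"

// threshold mirrors one HardEvictionThresholds entry: a signal plus either an
// absolute quantity in bytes or a percentage of capacity (as in the log, where
// memory.available uses "100Mi" and nodefs.available uses 0.1).
type threshold struct {
	signal   string
	quantity int64   // bytes; zero when the threshold is percentage-based
	percent  float64 // fraction of capacity; zero when quantity-based
}

// crossed reports whether the observed available amount is under the limit.
func (t threshold) crossed(available, capacity int64) bool {
	limit := t.quantity
	if t.percent > 0 {
		limit = int64(t.percent * float64(capacity))
	}
	return available < limit
}

func main() {
	memCap := int64(25199480832)    // MemoryCapacity from the Machine line above
	nodefsCap := int64(85292941312) // /dev/vda4 capacity from the Machine line

	mem := threshold{signal: "memory.available", quantity: 100 << 20}
	fmt.Println(mem.signal, mem.crossed(90<<20, memCap)) // 90Mi left -> true

	fs := threshold{signal: "nodefs.available", percent: 0.1}
	fmt.Println(fs.signal, fs.crossed(9<<30, nodefsCap)) // 9Gi left -> false
}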
Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.600098 4861 topology_manager.go:138] "Creating topology manager with none policy"
Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.600110 4861 container_manager_linux.go:303] "Creating device plugin manager"
Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.600721 4861 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock"
Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.600757 4861 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock"
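The registration server just created on /var/lib/kubelet/device-plugins/kubelet.sock is the gRPC endpoint device plugins use to announce themselves: a plugin dials that socket and calls Register on the v1beta1 Registration service, naming its own endpoint and the extended resource it serves. A minimal sketch against k8s.io/kubelet/pkg/apis/deviceplugin/v1beta1 (the resource name and plugin socket below are hypothetical):

package main

import (
	"context"
	"log"
	"time"

	"google.golang.org/grpc"
	"google.golang.org/grpc/credentials/insecure"
	pluginapi "k8s.io/kubelet/pkg/apis/deviceplugin/v1beta1"
)

func main() {
	// Dial the kubelet registration socket named in the log line above.
	conn, err := grpc.Dial("unix:///var/lib/kubelet/device-plugins/kubelet.sock",
		grpc.WithTransportCredentials(insecure.NewCredentials()))
	if err != nil {
		log.Fatalf("dial kubelet: %v", err)
	}
	defer conn.Close()

	ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
	defer cancel()

	// Announce a (hypothetical) plugin socket and extended resource name.
	_, err = pluginapi.NewRegistrationClient(conn).Register(ctx, &pluginapi.RegisterRequest{
		Version:      pluginapi.Version, // "v1beta1", matching the log line
		Endpoint:     "example-plugin.sock",
		ResourceName: "example.com/widget",
	})
	if err != nil {
		log.Fatalf("register: %v", err)
	}
}

After a successful Register call the kubelet dials back to the plugin's own socket in the same device-plugins directory, which is why Endpoint is a bare file name rather than a full path.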
Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.600932 4861 state_mem.go:36] "Initialized new in-memory state store"
Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.601029 4861 server.go:1245] "Using root directory" path="/var/lib/kubelet"
Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.604470 4861 kubelet.go:418] "Attempting to sync node with API server"
Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.604491 4861 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests"
Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.604516 4861 file.go:69] "Watching path" path="/etc/kubernetes/manifests"
Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.604530 4861 kubelet.go:324] "Adding apiserver pod source"
Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.604545 4861 apiserver.go:42] "Waiting for node sync before watching apiserver pods"
Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.607689 4861 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1"
Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.608585 4861 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem".
Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.610846 4861 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode"
Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.612643 4861 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.129.56.28:6443: connect: connection refused
Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.612630 4861 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.129.56.28:6443: connect: connection refused
Oct 03 13:31:26 crc kubenswrapper[4861]: E1003 13:31:26.612743 4861 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.129.56.28:6443: connect: connection refused" logger="UnhandledError"
Oct 03 13:31:26 crc kubenswrapper[4861]: E1003 13:31:26.612762 4861 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.129.56.28:6443: connect: connection refused" logger="UnhandledError"
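The two reflector warnings and their paired "Unhandled Error" entries above are the kubelet's informers failing to LIST nodes and services while the API server at api-int.crc.testing:6443 is still refusing connections; the failing URLs spell out the exact list calls. A minimal client-go sketch reproducing the node list from the first URL (the kubeconfig path is illustrative):

package main

import (
	"context"
	"fmt"
	"log"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	// Illustrative kubeconfig path; the kubelet uses its own credentials.
	cfg, err := clientcmd.BuildConfigFromFlags("", "/var/lib/kubelet/kubeconfig")
	if err != nil {
		log.Fatal(err)
	}
	cs, err := kubernetes.NewForConfig(cfg)
	if err != nil {
		log.Fatal(err)
	}

	// Mirrors GET /api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500;
	// while the API server is down this fails with the same
	// "connect: connection refused" seen in the log.
	nodes, err := cs.CoreV1().Nodes().List(context.Background(), metav1.ListOptions{
		FieldSelector: "metadata.name=crc",
		Limit:         500,
	})
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("nodes:", len(nodes.Items))
}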
Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.612903 4861 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume"
Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.612921 4861 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir"
Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.612930 4861 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo"
Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.612939 4861 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path"
Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.612952 4861 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs"
Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.612961 4861 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret"
Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.612968 4861 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi"
Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.612980 4861 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api"
Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.612987 4861 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc"
Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.612994 4861 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap"
Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.613002 4861 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected"
Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.613009 4861 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume"
Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.613917 4861 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi"
Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.614501 4861 server.go:1280] "Started kubelet"
Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.615695 4861 server.go:163] "Starting to listen" address="0.0.0.0" port=10250
Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.615696 4861 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10
Oct 03 13:31:26 crc systemd[1]: Started Kubernetes Kubelet.
Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.616714 4861 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock"
Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.616937 4861 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.129.56.28:6443: connect: connection refused
Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.618218 4861 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled
Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.618270 4861 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer"
Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.618987 4861 volume_manager.go:287] "The desired_state_of_world populator starts"
Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.619019 4861 volume_manager.go:289] "Starting Kubelet Volume Manager"
Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.619135 4861 desired_state_of_world_populator.go:146] "Desired state populator starts to run"
Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.618489 4861 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-09 14:33:48.1958292 +0000 UTC
Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.619374 4861 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 1609h2m21.576469651s for next certificate rotation
Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.620134 4861 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.129.56.28:6443: connect: connection refused
Oct 03 13:31:26 crc kubenswrapper[4861]: E1003 13:31:26.620291 4861 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.129.56.28:6443: connect: connection refused" logger="UnhandledError"
Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.621684 4861 factory.go:55] Registering systemd factory
Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.621716 4861 factory.go:221] Registration of the systemd container factory successfully
Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.621964 4861 factory.go:153] Registering CRI-O factory
Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.621977 4861 factory.go:221] Registration of the crio container factory successfully
Oct 03 13:31:26 crc kubenswrapper[4861]: E1003 13:31:26.622006 4861 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.622070 4861 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory
Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.622094 4861 factory.go:103] Registering Raw factory
Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.622111 4861 manager.go:1196] Started watching for new ooms in manager
Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.623206 4861 manager.go:319] Starting recovery of all containers
Oct 03 13:31:26 crc kubenswrapper[4861]: E1003 13:31:26.623358 4861 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.28:6443: connect: connection refused" interval="200ms"
Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.626044 4861 server.go:460] "Adding debug handlers to kubelet server"
Oct 03 13:31:26 crc kubenswrapper[4861]: E1003 13:31:26.632523 4861 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.129.56.28:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.186afe5e948e9ceb default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-10-03 13:31:26.614355179 +0000 UTC m=+0.612340226,LastTimestamp:2025-10-03 13:31:26.614355179 +0000 UTC m=+0.612340226,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}"
Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.638751 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext=""
Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.638872 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext=""
Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.638889 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext=""
Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.638922 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext=""
Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.638936 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext=""
Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.638952 4861 reconstruct.go:130] "Volume is marked as uncertain and
added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.638966 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.639002 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.639022 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.639037 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.639054 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.639070 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.639086 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.639104 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.639119 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.639139 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.639158 4861 reconstruct.go:130] "Volume is marked as uncertain 
and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.639176 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.639196 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.639214 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.639252 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.639272 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.639291 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.639313 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.639331 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.639350 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.639991 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.640011 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual 
state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.640030 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.640048 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.640066 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.640085 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.640322 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.640342 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.640357 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.640376 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.640392 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.640481 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.640504 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.640522 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.640539 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.640557 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.640575 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.640596 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.640611 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.640628 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.640643 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.640684 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.641186 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.641260 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.641282 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.641301 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.641321 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.641334 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.641348 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.641363 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.641382 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.641411 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.641432 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.641456 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.641475 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" 
volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.641493 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.641508 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.641520 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.641535 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.641550 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.641561 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.641575 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.643670 4861 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.643707 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.643726 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.643743 4861 reconstruct.go:130] "Volume is 
marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.643760 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.643778 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.643793 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.643811 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.643826 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.643842 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.643859 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.643874 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.643891 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.643904 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.643916 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual 
state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.643928 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.643940 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.643960 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.643973 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.643989 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.644002 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.644016 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.644050 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.644063 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.644075 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.644086 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.644131 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.644145 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.644159 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.644173 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.644186 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.644200 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.644211 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.644221 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.644248 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.644260 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.644275 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.644293 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.644308 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.644323 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.644335 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.644347 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.644361 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.644374 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.644393 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.644431 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.644448 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.644460 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.644471 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.644482 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.644494 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.644511 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.644522 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.644533 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.644544 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.644555 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.644565 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.644575 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.644587 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" 
volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.644600 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.644613 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.644626 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.644636 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.644646 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.644658 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.644671 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.644681 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.644691 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.644701 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.644712 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" 
volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.644725 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.644737 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.644751 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.644767 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.644780 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.644793 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.644806 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.644817 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.644829 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.644840 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.644853 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" 
volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.644864 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.644875 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.644890 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.644901 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.644912 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.644923 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.644933 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.644944 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.644955 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.644993 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.645006 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" 
volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.645026 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.645053 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.645064 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.645073 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.645083 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.645093 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.645102 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.645110 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.645120 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.645131 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.645141 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" 
volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.645151 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.645161 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.645173 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.645183 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.645192 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.645204 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.645213 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.645223 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.645248 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.645260 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.645271 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" 
volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.645283 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.645297 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.645309 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.645319 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.645332 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.645344 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.645358 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.645369 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.645380 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.645391 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.645403 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" 
volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.645415 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.645424 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.645435 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.645446 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.645458 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.645471 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.645487 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.645501 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.645515 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.645528 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.645541 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" 
volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.645558 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.645570 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.645582 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.645596 4861 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext="" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.645610 4861 reconstruct.go:97] "Volume reconstruction finished" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.645620 4861 reconciler.go:26] "Reconciler: start to sync state" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.648833 4861 manager.go:324] Recovery completed Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.658724 4861 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.660266 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.660295 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.660304 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.661185 4861 cpu_manager.go:225] "Starting CPU manager" policy="none" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.661205 4861 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.661226 4861 state_mem.go:36] "Initialized new in-memory state store" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.677942 4861 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.679759 4861 kubelet_network_linux.go:50] "Initialized iptables rules." 
protocol="IPv6" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.679820 4861 status_manager.go:217] "Starting to sync pod status with apiserver" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.679863 4861 kubelet.go:2335] "Starting kubelet main sync loop" Oct 03 13:31:26 crc kubenswrapper[4861]: E1003 13:31:26.679928 4861 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Oct 03 13:31:26 crc kubenswrapper[4861]: W1003 13:31:26.681579 4861 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.129.56.28:6443: connect: connection refused Oct 03 13:31:26 crc kubenswrapper[4861]: E1003 13:31:26.681655 4861 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.129.56.28:6443: connect: connection refused" logger="UnhandledError" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.696093 4861 policy_none.go:49] "None policy: Start" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.697654 4861 memory_manager.go:170] "Starting memorymanager" policy="None" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.697679 4861 state_mem.go:35] "Initializing new in-memory state store" Oct 03 13:31:26 crc kubenswrapper[4861]: E1003 13:31:26.722974 4861 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.759220 4861 manager.go:334] "Starting Device Plugin manager" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.759279 4861 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.759294 4861 server.go:79] "Starting device plugin registration server" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.759632 4861 eviction_manager.go:189] "Eviction manager: starting control loop" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.759659 4861 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.759845 4861 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.759906 4861 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.759912 4861 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Oct 03 13:31:26 crc kubenswrapper[4861]: E1003 13:31:26.769338 4861 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.780943 4861 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc","openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc"] Oct 03 13:31:26 crc kubenswrapper[4861]: 
Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.781140 4861 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.785587 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.786013 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.786119 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.786486 4861 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.787013 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.787105 4861 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.788171 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.788214 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.788245 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.788802 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.789272 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.789285 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.789413 4861 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.789658 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.789723 4861 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.790625 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.790653 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.790664 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.791204 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.791290 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.791302 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.791483 4861 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.791667 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc"
Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.791720 4861 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.793094 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.793124 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.793136 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.793082 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.793167 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.793177 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.793252 4861 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.793381 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc"
Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.793435 4861 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.794086 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.794106 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.794115 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.794125 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.794138 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.794126 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.794371 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.794394 4861 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.795111 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.795135 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.795146 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:31:26 crc kubenswrapper[4861]: E1003 13:31:26.824296 4861 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.28:6443: connect: connection refused" interval="400ms"
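The lease error just above is the node-heartbeat path: the kubelet ensures a Lease named after the node in the kube-node-lease namespace, and while the API server is unreachable the controller retries with a doubling interval, 400ms here and 800ms by 13:31:27.225169 further down. A toy generator for that doubling schedule, purely illustrative (the cap value below is an assumption for the sketch, not a number taken from this log):

from itertools import count

def backoff_intervals(base_ms: int = 400, cap_ms: int = 7000):
    """Yield a doubling retry schedule like the one visible in this log:
    400ms, 800ms, 1600ms, ... capped at cap_ms (the cap is assumed)."""
    for attempt in count():
        yield min(base_ms * 2 ** attempt, cap_ms)

sched = backoff_intervals()
print([next(sched) for _ in range(6)])  # [400, 800, 1600, 3200, 6400, 7000]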
pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.847515 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.847533 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.847549 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.847566 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.847585 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.847604 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.847623 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.847641 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.847661 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " 
pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.847683 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.847702 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.847718 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.860374 4861 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.861786 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.861916 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.862016 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.862105 4861 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 03 13:31:26 crc kubenswrapper[4861]: E1003 13:31:26.862671 4861 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.129.56.28:6443: connect: connection refused" node="crc" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.949411 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.949459 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.949478 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.949494 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: 
\"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.949512 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.949527 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.949542 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.949557 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.949621 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.949645 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.949675 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.949694 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.949717 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 03 
13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.949736 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.949756 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.949775 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.949839 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.949922 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.949790 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.949980 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.950010 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.950007 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.950043 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " 
pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.949742 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.950073 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.950082 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.950103 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.950108 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.950120 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 03 13:31:26 crc kubenswrapper[4861]: I1003 13:31:26.950137 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 03 13:31:27 crc kubenswrapper[4861]: I1003 13:31:27.063148 4861 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 13:31:27 crc kubenswrapper[4861]: I1003 13:31:27.064503 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:31:27 crc kubenswrapper[4861]: I1003 13:31:27.064561 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:31:27 crc kubenswrapper[4861]: I1003 13:31:27.064579 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:31:27 crc kubenswrapper[4861]: I1003 13:31:27.064610 4861 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 03 13:31:27 crc kubenswrapper[4861]: E1003 13:31:27.065119 4861 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.129.56.28:6443: connect: connection 
refused" node="crc" Oct 03 13:31:27 crc kubenswrapper[4861]: E1003 13:31:27.076942 4861 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.129.56.28:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.186afe5e948e9ceb default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-10-03 13:31:26.614355179 +0000 UTC m=+0.612340226,LastTimestamp:2025-10-03 13:31:26.614355179 +0000 UTC m=+0.612340226,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Oct 03 13:31:27 crc kubenswrapper[4861]: I1003 13:31:27.116603 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 03 13:31:27 crc kubenswrapper[4861]: I1003 13:31:27.135735 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 03 13:31:27 crc kubenswrapper[4861]: I1003 13:31:27.153544 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Oct 03 13:31:27 crc kubenswrapper[4861]: I1003 13:31:27.176183 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 03 13:31:27 crc kubenswrapper[4861]: W1003 13:31:27.178816 4861 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-c89194d1d775fe4d48160c162c933e2bbd74e025ebc85bb427bb0e7cbf9dcb20 WatchSource:0}: Error finding container c89194d1d775fe4d48160c162c933e2bbd74e025ebc85bb427bb0e7cbf9dcb20: Status 404 returned error can't find the container with id c89194d1d775fe4d48160c162c933e2bbd74e025ebc85bb427bb0e7cbf9dcb20 Oct 03 13:31:27 crc kubenswrapper[4861]: W1003 13:31:27.179430 4861 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-2c68baca4e39ce371886c16c3d0d5295cb6c2ecc85b4474667bf07319ce0f26d WatchSource:0}: Error finding container 2c68baca4e39ce371886c16c3d0d5295cb6c2ecc85b4474667bf07319ce0f26d: Status 404 returned error can't find the container with id 2c68baca4e39ce371886c16c3d0d5295cb6c2ecc85b4474667bf07319ce0f26d Oct 03 13:31:27 crc kubenswrapper[4861]: I1003 13:31:27.183329 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 03 13:31:27 crc kubenswrapper[4861]: W1003 13:31:27.184473 4861 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-d626ec05e7dcca7ae190ee7d022a4859c9e82a00c16eb1f72552d015f50f560f WatchSource:0}: Error finding container d626ec05e7dcca7ae190ee7d022a4859c9e82a00c16eb1f72552d015f50f560f: Status 404 returned error can't find the container with id d626ec05e7dcca7ae190ee7d022a4859c9e82a00c16eb1f72552d015f50f560f Oct 03 13:31:27 crc kubenswrapper[4861]: W1003 13:31:27.194510 4861 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-d3c1aad2c6d2e6f42725668fca5572be178b98f716893c2d32ac97b42f9dc0d8 WatchSource:0}: Error finding container d3c1aad2c6d2e6f42725668fca5572be178b98f716893c2d32ac97b42f9dc0d8: Status 404 returned error can't find the container with id d3c1aad2c6d2e6f42725668fca5572be178b98f716893c2d32ac97b42f9dc0d8 Oct 03 13:31:27 crc kubenswrapper[4861]: W1003 13:31:27.195857 4861 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-c9a38f246bdda5990ad63060bf90a0a28b5e38ffb2c7eb5dfde4a2b1b576f97f WatchSource:0}: Error finding container c9a38f246bdda5990ad63060bf90a0a28b5e38ffb2c7eb5dfde4a2b1b576f97f: Status 404 returned error can't find the container with id c9a38f246bdda5990ad63060bf90a0a28b5e38ffb2c7eb5dfde4a2b1b576f97f Oct 03 13:31:27 crc kubenswrapper[4861]: E1003 13:31:27.225169 4861 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.28:6443: connect: connection refused" interval="800ms" Oct 03 13:31:27 crc kubenswrapper[4861]: W1003 13:31:27.454752 4861 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.129.56.28:6443: connect: connection refused Oct 03 13:31:27 crc kubenswrapper[4861]: E1003 13:31:27.454852 4861 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.129.56.28:6443: connect: connection refused" logger="UnhandledError" Oct 03 13:31:27 crc kubenswrapper[4861]: I1003 13:31:27.466185 4861 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 13:31:27 crc kubenswrapper[4861]: I1003 13:31:27.470092 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:31:27 crc kubenswrapper[4861]: I1003 13:31:27.470138 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:31:27 crc kubenswrapper[4861]: I1003 13:31:27.470149 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:31:27 crc kubenswrapper[4861]: I1003 13:31:27.470181 4861 kubelet_node_status.go:76] "Attempting to register 
node" node="crc" Oct 03 13:31:27 crc kubenswrapper[4861]: E1003 13:31:27.470674 4861 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.129.56.28:6443: connect: connection refused" node="crc" Oct 03 13:31:27 crc kubenswrapper[4861]: W1003 13:31:27.534168 4861 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.129.56.28:6443: connect: connection refused Oct 03 13:31:27 crc kubenswrapper[4861]: E1003 13:31:27.534292 4861 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.129.56.28:6443: connect: connection refused" logger="UnhandledError" Oct 03 13:31:27 crc kubenswrapper[4861]: I1003 13:31:27.618255 4861 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.129.56.28:6443: connect: connection refused Oct 03 13:31:27 crc kubenswrapper[4861]: I1003 13:31:27.686187 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"c9a38f246bdda5990ad63060bf90a0a28b5e38ffb2c7eb5dfde4a2b1b576f97f"} Oct 03 13:31:27 crc kubenswrapper[4861]: I1003 13:31:27.687351 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"d626ec05e7dcca7ae190ee7d022a4859c9e82a00c16eb1f72552d015f50f560f"} Oct 03 13:31:27 crc kubenswrapper[4861]: I1003 13:31:27.688297 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"2c68baca4e39ce371886c16c3d0d5295cb6c2ecc85b4474667bf07319ce0f26d"} Oct 03 13:31:27 crc kubenswrapper[4861]: I1003 13:31:27.689117 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"c89194d1d775fe4d48160c162c933e2bbd74e025ebc85bb427bb0e7cbf9dcb20"} Oct 03 13:31:27 crc kubenswrapper[4861]: I1003 13:31:27.690885 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"d3c1aad2c6d2e6f42725668fca5572be178b98f716893c2d32ac97b42f9dc0d8"} Oct 03 13:31:28 crc kubenswrapper[4861]: E1003 13:31:28.026279 4861 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.28:6443: connect: connection refused" interval="1.6s" Oct 03 13:31:28 crc kubenswrapper[4861]: W1003 13:31:28.029566 4861 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 
38.129.56.28:6443: connect: connection refused Oct 03 13:31:28 crc kubenswrapper[4861]: E1003 13:31:28.029628 4861 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.129.56.28:6443: connect: connection refused" logger="UnhandledError" Oct 03 13:31:28 crc kubenswrapper[4861]: W1003 13:31:28.231906 4861 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.129.56.28:6443: connect: connection refused Oct 03 13:31:28 crc kubenswrapper[4861]: E1003 13:31:28.232004 4861 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.129.56.28:6443: connect: connection refused" logger="UnhandledError" Oct 03 13:31:28 crc kubenswrapper[4861]: I1003 13:31:28.271111 4861 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 13:31:28 crc kubenswrapper[4861]: I1003 13:31:28.273275 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:31:28 crc kubenswrapper[4861]: I1003 13:31:28.273327 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:31:28 crc kubenswrapper[4861]: I1003 13:31:28.273341 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:31:28 crc kubenswrapper[4861]: I1003 13:31:28.273377 4861 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 03 13:31:28 crc kubenswrapper[4861]: E1003 13:31:28.274015 4861 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.129.56.28:6443: connect: connection refused" node="crc" Oct 03 13:31:28 crc kubenswrapper[4861]: I1003 13:31:28.620030 4861 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.129.56.28:6443: connect: connection refused Oct 03 13:31:28 crc kubenswrapper[4861]: I1003 13:31:28.695811 4861 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="59521519c2b5394d291ce11d88a02fd108bfa859ad95bccd8be2332ccdd1461a" exitCode=0 Oct 03 13:31:28 crc kubenswrapper[4861]: I1003 13:31:28.695904 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"59521519c2b5394d291ce11d88a02fd108bfa859ad95bccd8be2332ccdd1461a"} Oct 03 13:31:28 crc kubenswrapper[4861]: I1003 13:31:28.696099 4861 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 13:31:28 crc kubenswrapper[4861]: I1003 13:31:28.697430 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:31:28 crc kubenswrapper[4861]: I1003 13:31:28.697468 4861 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:31:28 crc kubenswrapper[4861]: I1003 13:31:28.697481 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:31:28 crc kubenswrapper[4861]: I1003 13:31:28.699115 4861 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="11c4a586f59924aea976318f645b07cf22fde7f2129996691be3eafe7b19c85a" exitCode=0 Oct 03 13:31:28 crc kubenswrapper[4861]: I1003 13:31:28.699261 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"11c4a586f59924aea976318f645b07cf22fde7f2129996691be3eafe7b19c85a"} Oct 03 13:31:28 crc kubenswrapper[4861]: I1003 13:31:28.699276 4861 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 13:31:28 crc kubenswrapper[4861]: I1003 13:31:28.700964 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:31:28 crc kubenswrapper[4861]: I1003 13:31:28.701012 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:31:28 crc kubenswrapper[4861]: I1003 13:31:28.701029 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:31:28 crc kubenswrapper[4861]: I1003 13:31:28.703451 4861 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="95ae54721bb2577ab8bbcf2a0ac4893bfc1895e53ae1de248975f605729c030c" exitCode=0 Oct 03 13:31:28 crc kubenswrapper[4861]: I1003 13:31:28.703525 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"95ae54721bb2577ab8bbcf2a0ac4893bfc1895e53ae1de248975f605729c030c"} Oct 03 13:31:28 crc kubenswrapper[4861]: I1003 13:31:28.703571 4861 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 13:31:28 crc kubenswrapper[4861]: I1003 13:31:28.704741 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:31:28 crc kubenswrapper[4861]: I1003 13:31:28.704777 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:31:28 crc kubenswrapper[4861]: I1003 13:31:28.704790 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:31:28 crc kubenswrapper[4861]: I1003 13:31:28.707767 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"a40838be9fe69f9bebecff82c9f10b4c00e167b7f927682e6b18ff490bd10ad4"} Oct 03 13:31:28 crc kubenswrapper[4861]: I1003 13:31:28.707814 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"80131782150ecbeb45ec2f55e86909b3735ec4f0b09e27e31f6dfc24d6d4ccd7"} Oct 03 13:31:28 crc kubenswrapper[4861]: I1003 13:31:28.707826 4861 kubelet_node_status.go:401] "Setting node annotation to enable volume 
controller attach/detach" Oct 03 13:31:28 crc kubenswrapper[4861]: I1003 13:31:28.707831 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"01eb05b088e421c220145fd833922351aeba4a520944c6b707039785e26ef303"} Oct 03 13:31:28 crc kubenswrapper[4861]: I1003 13:31:28.707916 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"beb9bcc84067a58db80e3c7e1b23825baeaff91f97351e9ada3765b6589fda35"} Oct 03 13:31:28 crc kubenswrapper[4861]: I1003 13:31:28.708768 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:31:28 crc kubenswrapper[4861]: I1003 13:31:28.708803 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:31:28 crc kubenswrapper[4861]: I1003 13:31:28.708819 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:31:28 crc kubenswrapper[4861]: I1003 13:31:28.709904 4861 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="9bdf52752b6cec17ac3d33b9c94d08302ff9e7ff88ab9c23590134a36b1384af" exitCode=0 Oct 03 13:31:28 crc kubenswrapper[4861]: I1003 13:31:28.709992 4861 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 13:31:28 crc kubenswrapper[4861]: I1003 13:31:28.710478 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"9bdf52752b6cec17ac3d33b9c94d08302ff9e7ff88ab9c23590134a36b1384af"} Oct 03 13:31:28 crc kubenswrapper[4861]: I1003 13:31:28.711797 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:31:28 crc kubenswrapper[4861]: I1003 13:31:28.711825 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:31:28 crc kubenswrapper[4861]: I1003 13:31:28.711832 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:31:28 crc kubenswrapper[4861]: I1003 13:31:28.716340 4861 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 13:31:28 crc kubenswrapper[4861]: I1003 13:31:28.717522 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:31:28 crc kubenswrapper[4861]: I1003 13:31:28.717552 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:31:28 crc kubenswrapper[4861]: I1003 13:31:28.717562 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:31:29 crc kubenswrapper[4861]: W1003 13:31:29.539083 4861 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.129.56.28:6443: connect: connection refused Oct 03 13:31:29 crc kubenswrapper[4861]: E1003 13:31:29.539182 4861 
reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.129.56.28:6443: connect: connection refused" logger="UnhandledError" Oct 03 13:31:29 crc kubenswrapper[4861]: W1003 13:31:29.545140 4861 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.129.56.28:6443: connect: connection refused Oct 03 13:31:29 crc kubenswrapper[4861]: E1003 13:31:29.545461 4861 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.129.56.28:6443: connect: connection refused" logger="UnhandledError" Oct 03 13:31:29 crc kubenswrapper[4861]: I1003 13:31:29.617802 4861 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.129.56.28:6443: connect: connection refused Oct 03 13:31:29 crc kubenswrapper[4861]: E1003 13:31:29.627550 4861 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.28:6443: connect: connection refused" interval="3.2s" Oct 03 13:31:29 crc kubenswrapper[4861]: I1003 13:31:29.715503 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"eb1af1cb5b66706cd0a0da5a3f6b2c380a771100e61f84ca2c85c28f1878f7f6"} Oct 03 13:31:29 crc kubenswrapper[4861]: I1003 13:31:29.715563 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"853fb69edcd3e4a27929ab2a6081c40f93553967619663805afb7b626f9c1e39"} Oct 03 13:31:29 crc kubenswrapper[4861]: I1003 13:31:29.715578 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"75079f3e07d277ab11585e34fc72877ba93a8d0aeaa3f0c8bb214c7c14f9c1b1"} Oct 03 13:31:29 crc kubenswrapper[4861]: I1003 13:31:29.715589 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"4d77b51532e1ed4922634cbfc9360ac49276104c2c3ca115ea522ff423cd7bce"} Oct 03 13:31:29 crc kubenswrapper[4861]: I1003 13:31:29.717087 4861 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="a2b98c4f0e137d7bd6f137b72a7f18f40857f9a31ce7371ddd7656b7b63f6228" exitCode=0 Oct 03 13:31:29 crc kubenswrapper[4861]: I1003 13:31:29.717147 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"a2b98c4f0e137d7bd6f137b72a7f18f40857f9a31ce7371ddd7656b7b63f6228"} Oct 03 13:31:29 crc kubenswrapper[4861]: 
I1003 13:31:29.717305 4861 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 13:31:29 crc kubenswrapper[4861]: I1003 13:31:29.718529 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:31:29 crc kubenswrapper[4861]: I1003 13:31:29.718565 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:31:29 crc kubenswrapper[4861]: I1003 13:31:29.718576 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:31:29 crc kubenswrapper[4861]: I1003 13:31:29.722626 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"91be83d27c4db99504f5495585739911e698901210e25890fa2660db8ab5703a"} Oct 03 13:31:29 crc kubenswrapper[4861]: I1003 13:31:29.722680 4861 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 13:31:29 crc kubenswrapper[4861]: I1003 13:31:29.724809 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:31:29 crc kubenswrapper[4861]: I1003 13:31:29.724849 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:31:29 crc kubenswrapper[4861]: I1003 13:31:29.724957 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:31:29 crc kubenswrapper[4861]: I1003 13:31:29.733687 4861 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 13:31:29 crc kubenswrapper[4861]: I1003 13:31:29.733908 4861 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 13:31:29 crc kubenswrapper[4861]: I1003 13:31:29.733927 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"307fc4c2cff3f8a61720a6a33977b69811de252aa80d359754a27c2018137618"} Oct 03 13:31:29 crc kubenswrapper[4861]: I1003 13:31:29.733984 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"023e4a3ec0b7e1c5743940abaf3884d6209e3b2aea16acb7d4224c54cbeecb5d"} Oct 03 13:31:29 crc kubenswrapper[4861]: I1003 13:31:29.733999 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"32b22723ce52857bc415cc89ef0ed6c9ce091b425bc8bfba113badbda15a9c3f"} Oct 03 13:31:29 crc kubenswrapper[4861]: I1003 13:31:29.735585 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:31:29 crc kubenswrapper[4861]: I1003 13:31:29.735641 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:31:29 crc kubenswrapper[4861]: I1003 13:31:29.735657 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:31:29 crc kubenswrapper[4861]: I1003 13:31:29.736720 4861 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:31:29 crc kubenswrapper[4861]: I1003 13:31:29.736758 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:31:29 crc kubenswrapper[4861]: I1003 13:31:29.736768 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:31:29 crc kubenswrapper[4861]: I1003 13:31:29.874936 4861 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 13:31:29 crc kubenswrapper[4861]: I1003 13:31:29.878171 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:31:29 crc kubenswrapper[4861]: I1003 13:31:29.878250 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:31:29 crc kubenswrapper[4861]: I1003 13:31:29.878264 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:31:29 crc kubenswrapper[4861]: I1003 13:31:29.878297 4861 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 03 13:31:29 crc kubenswrapper[4861]: E1003 13:31:29.879126 4861 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.129.56.28:6443: connect: connection refused" node="crc" Oct 03 13:31:30 crc kubenswrapper[4861]: I1003 13:31:30.738942 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"83021bb2c43e49c7ea58c52228b8b97bfe846d23c2dd3eaba432c0ecfea78f33"} Oct 03 13:31:30 crc kubenswrapper[4861]: I1003 13:31:30.739012 4861 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 13:31:30 crc kubenswrapper[4861]: I1003 13:31:30.739726 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:31:30 crc kubenswrapper[4861]: I1003 13:31:30.739748 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:31:30 crc kubenswrapper[4861]: I1003 13:31:30.739757 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:31:30 crc kubenswrapper[4861]: I1003 13:31:30.741185 4861 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="d62a6e92cb6b25203cdb1055b0988c17f0d43663dc77c9bafa9d34b22e484ee7" exitCode=0 Oct 03 13:31:30 crc kubenswrapper[4861]: I1003 13:31:30.741267 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"d62a6e92cb6b25203cdb1055b0988c17f0d43663dc77c9bafa9d34b22e484ee7"} Oct 03 13:31:30 crc kubenswrapper[4861]: I1003 13:31:30.741285 4861 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 13:31:30 crc kubenswrapper[4861]: I1003 13:31:30.741328 4861 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 13:31:30 crc kubenswrapper[4861]: I1003 13:31:30.741332 4861 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 03 
13:31:30 crc kubenswrapper[4861]: I1003 13:31:30.741464 4861 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 13:31:30 crc kubenswrapper[4861]: I1003 13:31:30.741991 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:31:30 crc kubenswrapper[4861]: I1003 13:31:30.742026 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:31:30 crc kubenswrapper[4861]: I1003 13:31:30.742039 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:31:30 crc kubenswrapper[4861]: I1003 13:31:30.742205 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:31:30 crc kubenswrapper[4861]: I1003 13:31:30.742248 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:31:30 crc kubenswrapper[4861]: I1003 13:31:30.742265 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:31:30 crc kubenswrapper[4861]: I1003 13:31:30.742897 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:31:30 crc kubenswrapper[4861]: I1003 13:31:30.742922 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:31:30 crc kubenswrapper[4861]: I1003 13:31:30.742931 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:31:30 crc kubenswrapper[4861]: I1003 13:31:30.756267 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 03 13:31:31 crc kubenswrapper[4861]: I1003 13:31:31.579274 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 03 13:31:31 crc kubenswrapper[4861]: I1003 13:31:31.579671 4861 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 13:31:31 crc kubenswrapper[4861]: I1003 13:31:31.581169 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:31:31 crc kubenswrapper[4861]: I1003 13:31:31.581210 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:31:31 crc kubenswrapper[4861]: I1003 13:31:31.581220 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:31:31 crc kubenswrapper[4861]: I1003 13:31:31.748380 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"18f455b553713cc3bb86d6f19e96a5b45c9dfc0d2050f878d3b0cc427b0d5a67"} Oct 03 13:31:31 crc kubenswrapper[4861]: I1003 13:31:31.748435 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"3ef20785329a95fd324b22227bc0819f8d2e620708c067b7b22af858e8396712"} Oct 03 13:31:31 crc kubenswrapper[4861]: I1003 13:31:31.748454 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" 
event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"eb45d75d69e22edf8e37eef797ab42797ceaaad66cc3d3be5fc18e553f419a00"} Oct 03 13:31:31 crc kubenswrapper[4861]: I1003 13:31:31.748463 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"549e859de9d490090964939068a1b382229d05f4551c099f02330f6d44b7b459"} Oct 03 13:31:31 crc kubenswrapper[4861]: I1003 13:31:31.748472 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"46290393bce330d1be9a12b851523d0e2604f2162b956f963639d3033d7ac87a"} Oct 03 13:31:31 crc kubenswrapper[4861]: I1003 13:31:31.748471 4861 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 13:31:31 crc kubenswrapper[4861]: I1003 13:31:31.748508 4861 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 13:31:31 crc kubenswrapper[4861]: I1003 13:31:31.748511 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 03 13:31:31 crc kubenswrapper[4861]: I1003 13:31:31.749379 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:31:31 crc kubenswrapper[4861]: I1003 13:31:31.749425 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:31:31 crc kubenswrapper[4861]: I1003 13:31:31.749443 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:31:31 crc kubenswrapper[4861]: I1003 13:31:31.749464 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:31:31 crc kubenswrapper[4861]: I1003 13:31:31.749464 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:31:31 crc kubenswrapper[4861]: I1003 13:31:31.749610 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:31:32 crc kubenswrapper[4861]: I1003 13:31:32.074390 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 03 13:31:32 crc kubenswrapper[4861]: I1003 13:31:32.074560 4861 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 13:31:32 crc kubenswrapper[4861]: I1003 13:31:32.075600 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:31:32 crc kubenswrapper[4861]: I1003 13:31:32.075659 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:31:32 crc kubenswrapper[4861]: I1003 13:31:32.075673 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:31:32 crc kubenswrapper[4861]: I1003 13:31:32.750271 4861 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 13:31:32 crc kubenswrapper[4861]: I1003 13:31:32.750272 4861 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 13:31:32 crc kubenswrapper[4861]: I1003 13:31:32.751513 4861 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:31:32 crc kubenswrapper[4861]: I1003 13:31:32.751547 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:31:32 crc kubenswrapper[4861]: I1003 13:31:32.751559 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:31:32 crc kubenswrapper[4861]: I1003 13:31:32.752123 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:31:32 crc kubenswrapper[4861]: I1003 13:31:32.752167 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:31:32 crc kubenswrapper[4861]: I1003 13:31:32.752178 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:31:32 crc kubenswrapper[4861]: I1003 13:31:32.940912 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 03 13:31:33 crc kubenswrapper[4861]: I1003 13:31:33.079505 4861 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 13:31:33 crc kubenswrapper[4861]: I1003 13:31:33.080557 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:31:33 crc kubenswrapper[4861]: I1003 13:31:33.080596 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:31:33 crc kubenswrapper[4861]: I1003 13:31:33.080609 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:31:33 crc kubenswrapper[4861]: I1003 13:31:33.080635 4861 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 03 13:31:33 crc kubenswrapper[4861]: I1003 13:31:33.328315 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc" Oct 03 13:31:33 crc kubenswrapper[4861]: I1003 13:31:33.601542 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc" Oct 03 13:31:33 crc kubenswrapper[4861]: I1003 13:31:33.752207 4861 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 13:31:33 crc kubenswrapper[4861]: I1003 13:31:33.752207 4861 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 13:31:33 crc kubenswrapper[4861]: I1003 13:31:33.753450 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:31:33 crc kubenswrapper[4861]: I1003 13:31:33.753489 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:31:33 crc kubenswrapper[4861]: I1003 13:31:33.753500 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:31:33 crc kubenswrapper[4861]: I1003 13:31:33.753506 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:31:33 crc kubenswrapper[4861]: I1003 13:31:33.753530 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:31:33 crc kubenswrapper[4861]: I1003 13:31:33.753541 4861 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:31:34 crc kubenswrapper[4861]: I1003 13:31:34.580328 4861 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" start-of-body= Oct 03 13:31:34 crc kubenswrapper[4861]: I1003 13:31:34.580488 4861 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 03 13:31:34 crc kubenswrapper[4861]: I1003 13:31:34.754579 4861 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 13:31:34 crc kubenswrapper[4861]: I1003 13:31:34.755430 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:31:34 crc kubenswrapper[4861]: I1003 13:31:34.755471 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:31:34 crc kubenswrapper[4861]: I1003 13:31:34.755483 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:31:36 crc kubenswrapper[4861]: I1003 13:31:36.345895 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 03 13:31:36 crc kubenswrapper[4861]: I1003 13:31:36.346159 4861 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 13:31:36 crc kubenswrapper[4861]: I1003 13:31:36.348050 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:31:36 crc kubenswrapper[4861]: I1003 13:31:36.348109 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:31:36 crc kubenswrapper[4861]: I1003 13:31:36.348126 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:31:36 crc kubenswrapper[4861]: I1003 13:31:36.372030 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 03 13:31:36 crc kubenswrapper[4861]: I1003 13:31:36.402363 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 03 13:31:36 crc kubenswrapper[4861]: I1003 13:31:36.759405 4861 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 13:31:36 crc kubenswrapper[4861]: I1003 13:31:36.759545 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 03 13:31:36 crc kubenswrapper[4861]: I1003 13:31:36.760425 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:31:36 crc kubenswrapper[4861]: I1003 13:31:36.760464 4861 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasNoDiskPressure" Oct 03 13:31:36 crc kubenswrapper[4861]: I1003 13:31:36.760476 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:31:36 crc kubenswrapper[4861]: E1003 13:31:36.769439 4861 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Oct 03 13:31:37 crc kubenswrapper[4861]: I1003 13:31:37.761854 4861 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 13:31:37 crc kubenswrapper[4861]: I1003 13:31:37.763202 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:31:37 crc kubenswrapper[4861]: I1003 13:31:37.763262 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:31:37 crc kubenswrapper[4861]: I1003 13:31:37.763276 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:31:37 crc kubenswrapper[4861]: I1003 13:31:37.768527 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 03 13:31:38 crc kubenswrapper[4861]: I1003 13:31:38.764374 4861 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 13:31:38 crc kubenswrapper[4861]: I1003 13:31:38.765301 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:31:38 crc kubenswrapper[4861]: I1003 13:31:38.765929 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:31:38 crc kubenswrapper[4861]: I1003 13:31:38.765980 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:31:40 crc kubenswrapper[4861]: W1003 13:31:40.327036 4861 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": net/http: TLS handshake timeout Oct 03 13:31:40 crc kubenswrapper[4861]: I1003 13:31:40.327408 4861 trace.go:236] Trace[788645731]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (03-Oct-2025 13:31:30.325) (total time: 10001ms): Oct 03 13:31:40 crc kubenswrapper[4861]: Trace[788645731]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (13:31:40.327) Oct 03 13:31:40 crc kubenswrapper[4861]: Trace[788645731]: [10.001593289s] [10.001593289s] END Oct 03 13:31:40 crc kubenswrapper[4861]: E1003 13:31:40.327437 4861 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Oct 03 13:31:40 crc kubenswrapper[4861]: W1003 13:31:40.538635 4861 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": net/http: TLS handshake timeout Oct 03 13:31:40 crc kubenswrapper[4861]: I1003 
13:31:40.538744 4861 trace.go:236] Trace[1565919856]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (03-Oct-2025 13:31:30.537) (total time: 10001ms): Oct 03 13:31:40 crc kubenswrapper[4861]: Trace[1565919856]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (13:31:40.538) Oct 03 13:31:40 crc kubenswrapper[4861]: Trace[1565919856]: [10.001158517s] [10.001158517s] END Oct 03 13:31:40 crc kubenswrapper[4861]: E1003 13:31:40.538770 4861 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Oct 03 13:31:40 crc kubenswrapper[4861]: I1003 13:31:40.618131 4861 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": net/http: TLS handshake timeout Oct 03 13:31:40 crc kubenswrapper[4861]: I1003 13:31:40.756571 4861 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="Get \"https://192.168.126.11:6443/livez\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" start-of-body= Oct 03 13:31:40 crc kubenswrapper[4861]: I1003 13:31:40.756631 4861 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="Get \"https://192.168.126.11:6443/livez\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 03 13:31:40 crc kubenswrapper[4861]: I1003 13:31:40.789301 4861 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Liveness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:49166->192.168.126.11:17697: read: connection reset by peer" start-of-body= Oct 03 13:31:40 crc kubenswrapper[4861]: I1003 13:31:40.789365 4861 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:49166->192.168.126.11:17697: read: connection reset by peer" Oct 03 13:31:41 crc kubenswrapper[4861]: I1003 13:31:41.780454 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Oct 03 13:31:41 crc kubenswrapper[4861]: I1003 13:31:41.781841 4861 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="83021bb2c43e49c7ea58c52228b8b97bfe846d23c2dd3eaba432c0ecfea78f33" exitCode=255 Oct 03 13:31:41 crc kubenswrapper[4861]: I1003 13:31:41.781883 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"83021bb2c43e49c7ea58c52228b8b97bfe846d23c2dd3eaba432c0ecfea78f33"} Oct 03 13:31:41 crc 
kubenswrapper[4861]: I1003 13:31:41.782090 4861 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 03 13:31:41 crc kubenswrapper[4861]: I1003 13:31:41.782903 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:31:41 crc kubenswrapper[4861]: I1003 13:31:41.782947 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:31:41 crc kubenswrapper[4861]: I1003 13:31:41.782959 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:31:41 crc kubenswrapper[4861]: I1003 13:31:41.783560 4861 scope.go:117] "RemoveContainer" containerID="83021bb2c43e49c7ea58c52228b8b97bfe846d23c2dd3eaba432c0ecfea78f33"
Oct 03 13:31:42 crc kubenswrapper[4861]: I1003 13:31:42.466765 4861 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403}
Oct 03 13:31:42 crc kubenswrapper[4861]: I1003 13:31:42.466836 4861 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403"
Oct 03 13:31:42 crc kubenswrapper[4861]: I1003 13:31:42.788572 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log"
Oct 03 13:31:42 crc kubenswrapper[4861]: I1003 13:31:42.790702 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"cd18d89b8b81faa6f8232c33ecaec19aafc3d9574a090cbef37bcf26f83a4bf9"}
Oct 03 13:31:42 crc kubenswrapper[4861]: I1003 13:31:42.790905 4861 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 03 13:31:42 crc kubenswrapper[4861]: I1003 13:31:42.791848 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:31:42 crc kubenswrapper[4861]: I1003 13:31:42.791899 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:31:42 crc kubenswrapper[4861]: I1003 13:31:42.791915 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:31:43 crc kubenswrapper[4861]: I1003 13:31:43.381106 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc"
Oct 03 13:31:43 crc kubenswrapper[4861]: I1003 13:31:43.381311 4861 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 03 13:31:43 crc kubenswrapper[4861]: I1003 13:31:43.382417 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:31:43 crc kubenswrapper[4861]: I1003 13:31:43.382465 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:31:43 crc kubenswrapper[4861]: I1003 13:31:43.382478 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:31:43 crc kubenswrapper[4861]: I1003 13:31:43.395672 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc"
Oct 03 13:31:43 crc kubenswrapper[4861]: I1003 13:31:43.793130 4861 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 03 13:31:43 crc kubenswrapper[4861]: I1003 13:31:43.794168 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:31:43 crc kubenswrapper[4861]: I1003 13:31:43.794208 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:31:43 crc kubenswrapper[4861]: I1003 13:31:43.794217 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:31:44 crc kubenswrapper[4861]: I1003 13:31:44.580559 4861 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body=
Oct 03 13:31:44 crc kubenswrapper[4861]: I1003 13:31:44.580709 4861 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)"
Oct 03 13:31:45 crc kubenswrapper[4861]: I1003 13:31:45.762399 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc"
Oct 03 13:31:45 crc kubenswrapper[4861]: I1003 13:31:45.762534 4861 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 03 13:31:45 crc kubenswrapper[4861]: I1003 13:31:45.762645 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc"
Oct 03 13:31:45 crc kubenswrapper[4861]: I1003 13:31:45.763556 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:31:45 crc kubenswrapper[4861]: I1003 13:31:45.763605 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:31:45 crc kubenswrapper[4861]: I1003 13:31:45.763619 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:31:45 crc kubenswrapper[4861]: I1003 13:31:45.766522 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc"
Oct 03 13:31:45 crc kubenswrapper[4861]: I1003 13:31:45.797087 4861 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 03 13:31:45 crc kubenswrapper[4861]: I1003 13:31:45.798000 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:31:45 crc kubenswrapper[4861]: I1003 13:31:45.798036 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:31:45 crc kubenswrapper[4861]: I1003 13:31:45.798045 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:31:45 crc kubenswrapper[4861]: I1003 13:31:45.910968 4861 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160
Oct 03 13:31:45 crc kubenswrapper[4861]: I1003 13:31:45.957906 4861 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160
Oct 03 13:31:46 crc kubenswrapper[4861]: E1003 13:31:46.769533 4861 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found"
Oct 03 13:31:46 crc kubenswrapper[4861]: I1003 13:31:46.799692 4861 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 03 13:31:46 crc kubenswrapper[4861]: I1003 13:31:46.800431 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:31:46 crc kubenswrapper[4861]: I1003 13:31:46.800489 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:31:46 crc kubenswrapper[4861]: I1003 13:31:46.800507 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:31:47 crc kubenswrapper[4861]: E1003 13:31:47.449523 4861 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": context deadline exceeded" interval="6.4s"
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.453925 4861 reconstruct.go:205] "DevicePaths of reconstructed volumes updated"
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.454002 4861 trace.go:236] Trace[2075906659]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (03-Oct-2025 13:31:34.923) (total time: 12530ms):
Oct 03 13:31:47 crc kubenswrapper[4861]: Trace[2075906659]: ---"Objects listed" error: 12530ms (13:31:47.453)
Oct 03 13:31:47 crc kubenswrapper[4861]: Trace[2075906659]: [12.530689213s] [12.530689213s] END
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.454214 4861 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.454243 4861 trace.go:236] Trace[1051099043]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (03-Oct-2025 13:31:35.607) (total time: 11846ms):
Oct 03 13:31:47 crc kubenswrapper[4861]: Trace[1051099043]: ---"Objects listed" error: 11846ms (13:31:47.454)
Oct 03 13:31:47 crc kubenswrapper[4861]: Trace[1051099043]: [11.846220053s] [11.846220053s] END
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.454319 4861 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160
Oct 03 13:31:47 crc kubenswrapper[4861]: E1003 13:31:47.469716 4861 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc"
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.615561 4861 apiserver.go:52] "Watching apiserver"
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.619154 4861 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.619601 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-operator/network-operator-58b4c7f79c-55gtf","openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c","openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h"]
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.619970 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf"
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.620000 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.620084 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 03 13:31:47 crc kubenswrapper[4861]: E1003 13:31:47.620082 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 03 13:31:47 crc kubenswrapper[4861]: E1003 13:31:47.620149 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.620177 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb"
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.620190 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h"
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.620383 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 03 13:31:47 crc kubenswrapper[4861]: E1003 13:31:47.620424 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.623133 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.623138 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.623146 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.623150 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.623310 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.623787 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.623573 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.626013 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.626131 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.649929 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.662967 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.677755 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.691468 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.701156 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.711219 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.720313 4861 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.722269 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.734655 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.756522 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.756569 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.756588 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.756603 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.756620 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.756635 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.756652 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: 
\"1d611f23-29be-4491-8495-bee1670e935f\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.756668 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.756685 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.756698 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.756716 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.756732 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.756750 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.756765 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.756779 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.756798 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.756812 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: 
\"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.756828 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.756844 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.756859 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.756872 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.756888 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.756901 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.756916 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.756929 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.756943 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.756957 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" 
(UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.756973 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.756988 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.757011 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.757026 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.757044 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.757060 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.757075 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.757093 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.757108 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.757123 4861 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.757138 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.757153 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.757169 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.757183 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.757201 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.757218 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.757249 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.757264 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.757279 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.757294 4861 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.757336 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.757354 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.757369 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.757384 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.757400 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.757417 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.757432 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.757447 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.757463 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 
13:31:47.757479 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.757500 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.757523 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.757545 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.757567 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.757586 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.757600 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.757614 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.757632 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.757646 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 
13:31:47.757662 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.757705 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.757724 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.757738 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.757754 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.757770 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.757785 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.757800 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.757818 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.757833 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Oct 03 13:31:47 
crc kubenswrapper[4861]: I1003 13:31:47.757849 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.757867 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.757882 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.757897 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.757914 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.757930 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.757948 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.757964 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.757979 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.757995 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod 
\"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.758010 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.758025 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.758039 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.758055 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.758070 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.758085 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.758101 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.758116 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.758131 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.758146 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod 
\"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.758162 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.758177 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.758194 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.758209 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.758242 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.758259 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.758275 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.758290 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.758306 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.758323 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: 
\"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.758338 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.758353 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.758369 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.758385 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.758400 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.758415 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.758430 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.758444 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.758462 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.758477 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.758498 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.758529 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.758554 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.758575 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.758595 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.758611 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.758629 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.758648 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.758665 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.758680 4861 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.758697 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.758714 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.758730 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.758745 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.758762 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.758778 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.758794 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.758811 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.758827 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.758843 4861 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.758864 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.758879 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.758895 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.758911 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.758927 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.758943 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.758959 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.758975 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.758992 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.759008 4861 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.759025 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.759040 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.759056 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.759071 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.759086 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.759101 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.759119 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.759136 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.759153 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 
13:31:47.759169 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.759193 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.759209 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.759373 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.759394 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.757008 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.757293 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.757442 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.757517 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.757612 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.757717 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.757765 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.757897 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.758273 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.758466 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.758487 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.758629 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.758682 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.758776 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.758898 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.758929 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.759107 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.759135 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.759555 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.759577 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.759358 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.759648 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.759729 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.759411 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.759774 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.759791 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.759818 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.759838 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.759856 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.759875 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.759886 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.759893 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.759911 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.759929 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.759937 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.759946 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.759965 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.759982 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.760000 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.760018 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.760036 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.760038 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.760068 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.760086 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.760090 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.760102 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.760122 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.760137 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.760158 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.760177 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod 
\"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.760195 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.760212 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.759397 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.759189 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.760613 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.760634 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.760650 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.760666 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.760682 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 03 13:31:47 crc 
kubenswrapper[4861]: I1003 13:31:47.760698 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") "
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.760713 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") "
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.760728 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") "
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.760744 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") "
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.760760 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.760775 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") "
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.760811 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb"
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.760830 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb"
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.760847 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h"
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.760865 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.760894 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.760912 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.760932 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.760953 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h"
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.760969 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf"
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.760988 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf"
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.761008 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb"
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.761024 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h"
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.761040 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb"
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.761055 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf"
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.761103 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.761114 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.761125 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.761139 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.761136 4861 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.761245 4861 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.761258 4861 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.761268 4861 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.761277 4861 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.761286 4861 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.761295 4861 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.761304 4861 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.761359 4861 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.761370 4861 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.761384 4861 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.761393 4861 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.761402 4861 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.761411 4861 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.761420 4861 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.761429 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.761440 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.761449 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.761459 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.761469 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.761479 4861 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.761478 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.761491 4861 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.761551 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.761560 4861 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.761596 4861 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.761633 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.761811 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.762033 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.762102 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.762156 4861 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory"
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.762366 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.762407 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.762580 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.762693 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.762976 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.763000 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.763066 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.763296 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.763386 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.763438 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.763643 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.763761 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.763982 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.763993 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.764004 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.764034 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.764494 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.764578 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.764610 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.764964 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.765045 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.765722 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.765998 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.766054 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.766302 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.766136 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.766384 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.766598 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.766658 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.766763 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.767144 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.767314 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.767432 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.767466 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.769177 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.770390 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.770672 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.770893 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.771120 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.771500 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.771610 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.771801 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.772149 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf"
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.772287 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.772925 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.773362 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.773425 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.773612 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.773883 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.774274 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.774375 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.774960 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.775279 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.775408 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.775624 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.775840 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.776367 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.776448 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.776485 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.776823 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.777447 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.777719 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.779289 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: E1003 13:31:47.779635 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 13:31:48.279609378 +0000 UTC m=+22.277594505 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.780307 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.780690 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.780981 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.781049 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.781174 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.781381 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.781636 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.781840 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.781888 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.782027 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.782082 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.782125 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.782391 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.782487 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.782637 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.782686 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.782917 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.783151 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.784032 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.784309 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.784341 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.784463 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.784668 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.783580 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.785069 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.785433 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.785586 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.785673 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.785725 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.785798 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.785740 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.785740 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.784450 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.786617 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.786769 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: E1003 13:31:47.787758 4861 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.787783 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: E1003 13:31:47.787849 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-03 13:31:48.287807398 +0000 UTC m=+22.285792455 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.787871 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: E1003 13:31:47.788133 4861 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.788148 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: E1003 13:31:47.788168 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-03 13:31:48.288158417 +0000 UTC m=+22.286143564 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.788195 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.787612 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.788322 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.788706 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb"
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.789251 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h"
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.790002 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.790429 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.790578 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.791831 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.792117 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.792208 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.793132 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.793370 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.793557 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert".
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.793736 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.794744 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.794752 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.794911 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.795070 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.795172 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.795454 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.795917 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.796290 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.796723 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.796746 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.797182 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.797756 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.798708 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.799090 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.799173 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). 
InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.799612 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.799688 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.799902 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.800610 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.801202 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.801537 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.801789 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.802118 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.802352 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.802964 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.803081 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.803272 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.803286 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.803793 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.803820 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.803886 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.803892 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.803940 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.804132 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.804036 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:31:47 crc kubenswrapper[4861]: E1003 13:31:47.804563 4861 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 03 13:31:47 crc kubenswrapper[4861]: E1003 13:31:47.804588 4861 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 03 13:31:47 crc kubenswrapper[4861]: E1003 13:31:47.804602 4861 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 13:31:47 crc kubenswrapper[4861]: E1003 13:31:47.804658 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. 
No retries permitted until 2025-10-03 13:31:48.304641328 +0000 UTC m=+22.302626455 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 13:31:47 crc kubenswrapper[4861]: E1003 13:31:47.805103 4861 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 03 13:31:47 crc kubenswrapper[4861]: E1003 13:31:47.805179 4861 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 03 13:31:47 crc kubenswrapper[4861]: E1003 13:31:47.805196 4861 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 13:31:47 crc kubenswrapper[4861]: E1003 13:31:47.805297 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-03 13:31:48.305261445 +0000 UTC m=+22.303246582 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.806224 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.807362 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.807580 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.807645 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.807797 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.807846 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.810561 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.810827 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.811425 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.819331 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.825722 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.850575 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.853169 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.863942 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.863976 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.864030 4861 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.864042 4861 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.864053 4861 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.864063 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: 
\"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.864074 4861 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.864083 4861 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.864093 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.864103 4861 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.864114 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.864127 4861 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.864137 4861 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.864148 4861 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.864159 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.864169 4861 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.864179 4861 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.864190 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.864201 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: 
\"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.864211 4861 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.864219 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.864255 4861 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.864264 4861 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.864272 4861 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.864281 4861 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.864289 4861 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.864298 4861 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.864308 4861 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.864320 4861 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.864332 4861 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.864342 4861 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.864354 4861 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.864365 4861 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.864375 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.864384 4861 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.864393 4861 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.864405 4861 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.864415 4861 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.864425 4861 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.864436 4861 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.864446 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.864457 4861 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.864468 4861 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.864475 4861 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.864483 4861 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" 
(UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.864492 4861 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.864500 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.864509 4861 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.864517 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.864526 4861 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.864534 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.864541 4861 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.864549 4861 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.864557 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.864565 4861 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.864573 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.864582 4861 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.864591 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: 
\"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.864599 4861 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.864606 4861 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.864615 4861 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.864622 4861 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.864630 4861 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.864637 4861 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.864645 4861 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.864653 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.864661 4861 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.864669 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.864678 4861 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.864686 4861 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.864693 4861 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.864701 4861 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.864709 4861 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.864716 4861 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.864725 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.864733 4861 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.864740 4861 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.864748 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.864756 4861 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.864764 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.864772 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.864779 4861 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.864787 4861 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.864794 4861 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: 
\"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.864802 4861 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.864809 4861 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.864817 4861 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.864826 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.864834 4861 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.864842 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.864850 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.864860 4861 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.864871 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.864881 4861 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.864892 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.864904 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.864914 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: 
\"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.864925 4861 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.864936 4861 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.864947 4861 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.864957 4861 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.864971 4861 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.864982 4861 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.864993 4861 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.865003 4861 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.865014 4861 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.865025 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.865036 4861 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.865048 4861 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.865059 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" 
(UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.865070 4861 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.865081 4861 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.865093 4861 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.865103 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.865113 4861 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.865124 4861 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.865134 4861 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.865145 4861 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.865157 4861 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.865168 4861 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.865180 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.865191 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.865203 4861 reconciler_common.go:293] "Volume detached for volume 
\"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.865214 4861 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.865225 4861 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.865256 4861 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.865268 4861 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.865280 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.865292 4861 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.865323 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.865335 4861 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.865346 4861 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.865358 4861 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.865370 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.865383 4861 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.865393 4861 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: 
\"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.865404 4861 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.865416 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.865427 4861 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.865438 4861 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.865449 4861 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.865463 4861 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.865474 4861 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.865486 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.865498 4861 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.865509 4861 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.865526 4861 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.865537 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.865548 4861 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: 
\"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.865559 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.865571 4861 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.865582 4861 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.865592 4861 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.865603 4861 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.865615 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.865625 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.865636 4861 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.865647 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.865657 4861 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.865669 4861 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.865680 4861 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.865692 4861 reconciler_common.go:293] "Volume detached for volume 
\"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.865705 4861 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.865717 4861 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.865729 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.865740 4861 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.865750 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.865761 4861 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.865833 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.865881 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.866139 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.937057 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.947731 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.951913 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 03 13:31:47 crc kubenswrapper[4861]: I1003 13:31:47.966849 4861 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:48 crc kubenswrapper[4861]: I1003 13:31:48.369652 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 13:31:48 crc kubenswrapper[4861]: E1003 13:31:48.369832 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 13:31:49.369806229 +0000 UTC m=+23.367791276 (durationBeforeRetry 1s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:31:48 crc kubenswrapper[4861]: I1003 13:31:48.370017 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 13:31:48 crc kubenswrapper[4861]: I1003 13:31:48.370039 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 13:31:48 crc kubenswrapper[4861]: I1003 13:31:48.370057 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 13:31:48 crc kubenswrapper[4861]: I1003 13:31:48.370078 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 13:31:48 crc kubenswrapper[4861]: E1003 13:31:48.370181 4861 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object 
"openshift-network-console"/"networking-console-plugin-cert" not registered Oct 03 13:31:48 crc kubenswrapper[4861]: E1003 13:31:48.370190 4861 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 03 13:31:48 crc kubenswrapper[4861]: E1003 13:31:48.370251 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-03 13:31:49.370221589 +0000 UTC m=+23.368206626 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 03 13:31:48 crc kubenswrapper[4861]: E1003 13:31:48.370267 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-03 13:31:49.37026049 +0000 UTC m=+23.368245527 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 03 13:31:48 crc kubenswrapper[4861]: E1003 13:31:48.370279 4861 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 03 13:31:48 crc kubenswrapper[4861]: E1003 13:31:48.370290 4861 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 03 13:31:48 crc kubenswrapper[4861]: E1003 13:31:48.370303 4861 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 03 13:31:48 crc kubenswrapper[4861]: E1003 13:31:48.370348 4861 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 03 13:31:48 crc kubenswrapper[4861]: E1003 13:31:48.370363 4861 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 13:31:48 crc kubenswrapper[4861]: E1003 13:31:48.370318 4861 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 13:31:48 crc kubenswrapper[4861]: E1003 13:31:48.370424 4861 
nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-03 13:31:49.370402614 +0000 UTC m=+23.368387721 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 13:31:48 crc kubenswrapper[4861]: E1003 13:31:48.370452 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-03 13:31:49.370442925 +0000 UTC m=+23.368427972 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 13:31:48 crc kubenswrapper[4861]: I1003 13:31:48.684406 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Oct 03 13:31:48 crc kubenswrapper[4861]: I1003 13:31:48.685044 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Oct 03 13:31:48 crc kubenswrapper[4861]: I1003 13:31:48.686023 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Oct 03 13:31:48 crc kubenswrapper[4861]: I1003 13:31:48.686784 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Oct 03 13:31:48 crc kubenswrapper[4861]: I1003 13:31:48.687481 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Oct 03 13:31:48 crc kubenswrapper[4861]: I1003 13:31:48.688064 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Oct 03 13:31:48 crc kubenswrapper[4861]: I1003 13:31:48.688861 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Oct 03 13:31:48 crc kubenswrapper[4861]: I1003 13:31:48.689556 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Oct 03 13:31:48 crc 
kubenswrapper[4861]: I1003 13:31:48.690345 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Oct 03 13:31:48 crc kubenswrapper[4861]: I1003 13:31:48.693068 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Oct 03 13:31:48 crc kubenswrapper[4861]: I1003 13:31:48.693703 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Oct 03 13:31:48 crc kubenswrapper[4861]: I1003 13:31:48.695018 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Oct 03 13:31:48 crc kubenswrapper[4861]: I1003 13:31:48.695704 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Oct 03 13:31:48 crc kubenswrapper[4861]: I1003 13:31:48.696322 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Oct 03 13:31:48 crc kubenswrapper[4861]: I1003 13:31:48.697440 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Oct 03 13:31:48 crc kubenswrapper[4861]: I1003 13:31:48.698062 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Oct 03 13:31:48 crc kubenswrapper[4861]: I1003 13:31:48.699650 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Oct 03 13:31:48 crc kubenswrapper[4861]: I1003 13:31:48.700119 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Oct 03 13:31:48 crc kubenswrapper[4861]: I1003 13:31:48.700837 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Oct 03 13:31:48 crc kubenswrapper[4861]: I1003 13:31:48.703814 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Oct 03 13:31:48 crc kubenswrapper[4861]: I1003 13:31:48.704330 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Oct 03 13:31:48 crc kubenswrapper[4861]: I1003 13:31:48.705519 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Oct 03 13:31:48 crc kubenswrapper[4861]: I1003 13:31:48.705956 4861 
kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Oct 03 13:31:48 crc kubenswrapper[4861]: I1003 13:31:48.710294 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Oct 03 13:31:48 crc kubenswrapper[4861]: I1003 13:31:48.710912 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Oct 03 13:31:48 crc kubenswrapper[4861]: I1003 13:31:48.711665 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Oct 03 13:31:48 crc kubenswrapper[4861]: I1003 13:31:48.713108 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Oct 03 13:31:48 crc kubenswrapper[4861]: I1003 13:31:48.713689 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Oct 03 13:31:48 crc kubenswrapper[4861]: I1003 13:31:48.714829 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Oct 03 13:31:48 crc kubenswrapper[4861]: I1003 13:31:48.715374 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Oct 03 13:31:48 crc kubenswrapper[4861]: I1003 13:31:48.716265 4861 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Oct 03 13:31:48 crc kubenswrapper[4861]: I1003 13:31:48.716368 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Oct 03 13:31:48 crc kubenswrapper[4861]: I1003 13:31:48.718053 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Oct 03 13:31:48 crc kubenswrapper[4861]: I1003 13:31:48.718568 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Oct 03 13:31:48 crc kubenswrapper[4861]: I1003 13:31:48.719355 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Oct 03 13:31:48 crc kubenswrapper[4861]: I1003 13:31:48.720955 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Oct 03 13:31:48 crc kubenswrapper[4861]: I1003 13:31:48.721836 4861 
kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Oct 03 13:31:48 crc kubenswrapper[4861]: I1003 13:31:48.722922 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Oct 03 13:31:48 crc kubenswrapper[4861]: I1003 13:31:48.723712 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Oct 03 13:31:48 crc kubenswrapper[4861]: I1003 13:31:48.726755 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Oct 03 13:31:48 crc kubenswrapper[4861]: I1003 13:31:48.727263 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Oct 03 13:31:48 crc kubenswrapper[4861]: I1003 13:31:48.728517 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Oct 03 13:31:48 crc kubenswrapper[4861]: I1003 13:31:48.729170 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Oct 03 13:31:48 crc kubenswrapper[4861]: I1003 13:31:48.730369 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Oct 03 13:31:48 crc kubenswrapper[4861]: I1003 13:31:48.730944 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Oct 03 13:31:48 crc kubenswrapper[4861]: I1003 13:31:48.733679 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Oct 03 13:31:48 crc kubenswrapper[4861]: I1003 13:31:48.734217 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Oct 03 13:31:48 crc kubenswrapper[4861]: I1003 13:31:48.735366 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Oct 03 13:31:48 crc kubenswrapper[4861]: I1003 13:31:48.735817 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Oct 03 13:31:48 crc kubenswrapper[4861]: I1003 13:31:48.736847 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Oct 03 13:31:48 crc kubenswrapper[4861]: I1003 13:31:48.737670 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod 
volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Oct 03 13:31:48 crc kubenswrapper[4861]: I1003 13:31:48.738503 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Oct 03 13:31:48 crc kubenswrapper[4861]: I1003 13:31:48.739260 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Oct 03 13:31:48 crc kubenswrapper[4861]: I1003 13:31:48.739845 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Oct 03 13:31:48 crc kubenswrapper[4861]: I1003 13:31:48.808405 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"f330284d8d5446236d2bf739c6df75969c865e304c5adab6b1ec40a92baf30d4"} Oct 03 13:31:48 crc kubenswrapper[4861]: I1003 13:31:48.808452 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"63b55320840755d0a9c8296d19550ae1d7f5cb2f17d286dddc10a0202963bd98"} Oct 03 13:31:48 crc kubenswrapper[4861]: I1003 13:31:48.808466 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"6d7902540dcd0f224525aa9c26d847a9d7ac0f23f80f5efc41c2bb19cd2b5192"} Oct 03 13:31:48 crc kubenswrapper[4861]: I1003 13:31:48.810550 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Oct 03 13:31:48 crc kubenswrapper[4861]: I1003 13:31:48.810904 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Oct 03 13:31:48 crc kubenswrapper[4861]: I1003 13:31:48.812617 4861 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="cd18d89b8b81faa6f8232c33ecaec19aafc3d9574a090cbef37bcf26f83a4bf9" exitCode=255 Oct 03 13:31:48 crc kubenswrapper[4861]: I1003 13:31:48.812684 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"cd18d89b8b81faa6f8232c33ecaec19aafc3d9574a090cbef37bcf26f83a4bf9"} Oct 03 13:31:48 crc kubenswrapper[4861]: I1003 13:31:48.812737 4861 scope.go:117] "RemoveContainer" containerID="83021bb2c43e49c7ea58c52228b8b97bfe846d23c2dd3eaba432c0ecfea78f33" Oct 03 13:31:48 crc kubenswrapper[4861]: I1003 13:31:48.813840 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"bdc42c1798ed650fe16130c3cd5ee5e52b919bb9da152e1eb2feaf0fe1054b31"} Oct 03 13:31:48 crc kubenswrapper[4861]: I1003 13:31:48.815836 4861 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"ceeea9a8c61928b935a6c01f2dda3f9bf0036c2c2792c9338cc580a3296285b3"} Oct 03 13:31:48 crc kubenswrapper[4861]: I1003 13:31:48.815871 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"ada1cc53fcbee2fdcfa69fa267d18185e44313cb452c6321b2f201a1e2ac8085"} Oct 03 13:31:48 crc kubenswrapper[4861]: I1003 13:31:48.823282 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f330284d8d5446236d2bf739c6df75969c865e304c5adab6b1ec40a92baf30d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63b55320840755d0a9c8296d19550ae1d7f5cb2f17d286dddc10a0202963bd98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 13:31:48 crc kubenswrapper[4861]: I1003 13:31:48.838371 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 13:31:48 crc kubenswrapper[4861]: I1003 13:31:48.857429 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 13:31:48 crc kubenswrapper[4861]: I1003 13:31:48.869689 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Oct 03 13:31:48 crc kubenswrapper[4861]: I1003 13:31:48.871260 4861 scope.go:117] "RemoveContainer" containerID="cd18d89b8b81faa6f8232c33ecaec19aafc3d9574a090cbef37bcf26f83a4bf9" Oct 03 13:31:48 crc kubenswrapper[4861]: E1003 13:31:48.871471 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Oct 03 13:31:48 crc kubenswrapper[4861]: I1003 13:31:48.879377 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be 
located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 13:31:48 crc kubenswrapper[4861]: I1003 13:31:48.901532 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 13:31:48 crc kubenswrapper[4861]: I1003 13:31:48.919144 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 13:31:48 crc kubenswrapper[4861]: I1003 13:31:48.947292 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f330284d8d5446236d2bf739c6df75969c865e304c5adab6b1ec40a92baf30d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63b55320840755d0a9c8296d19550ae1d7f5cb2f17d286dddc10a0202963bd98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"moun
tPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 13:31:48 crc kubenswrapper[4861]: I1003 13:31:48.984762 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"587ecce6-1ef4-4f74-a2ba-bd6e9fdb84dc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d77b51532e1ed4922634cbfc9360ac49276104c2c3ca115ea522ff423cd7bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://853fb69edcd3e4a27929ab2a6081c40f93553967619663805afb7b626f9c1e39\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75079f3e07d277ab11585e34fc72877ba93a8d0aeaa3f0c8bb214c7c14f9c1b1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd18d89b8b81faa6f8232c33ecaec19aafc3d9574a090cbef37bcf26f83a4bf9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://83021bb2c43e49c7ea58c52228b8b97bfe846d23c2dd3eaba432c0ecfea78f33\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T13:31:41Z\\\",\\\"message\\\":\\\"W1003 13:31:29.881099 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1003 13:31:29.881520 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759498289 cert, and key in /tmp/serving-cert-3863988678/serving-signer.crt, /tmp/serving-cert-3863988678/serving-signer.key\\\\nI1003 13:31:30.211791 1 observer_polling.go:159] Starting file observer\\\\nW1003 13:31:30.215003 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1003 13:31:30.215163 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 13:31:30.217928 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3863988678/tls.crt::/tmp/serving-cert-3863988678/tls.key\\\\\\\"\\\\nF1003 13:31:40.785503 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cd18d89b8b81faa6f8232c33ecaec19aafc3d9574a090cbef37bcf26f83a4bf9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 13:31:47.746138 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 13:31:47.746280 1 builder.go:304] check-endpoints version 
4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 13:31:47.747035 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2119377140/tls.crt::/tmp/serving-cert-2119377140/tls.key\\\\\\\"\\\\nI1003 13:31:48.538827 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 13:31:48.544908 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 13:31:48.544935 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 13:31:48.545220 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 13:31:48.545275 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 13:31:48.555911 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1003 13:31:48.555947 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 13:31:48.555953 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 13:31:48.555961 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 13:31:48.555964 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 13:31:48.555968 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 13:31:48.555971 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1003 13:31:48.556259 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1003 13:31:48.559989 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb1af1cb5b66706cd0a0da5a3f6b2c380a771100e61f84ca2c85c28f1878f7f6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bdf52752b6cec17ac3d33b9c94d08302ff9e7ff88ab9c23590134a36b1384af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bdf52752b6cec17ac3d33b9c94d08302ff9e7ff88ab9c23590134a36b1384af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.007306 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.037016 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.052421 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ceeea9a8c61928b935a6c01f2dda3f9bf0036c2c2792c9338cc580a3296285b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.061870 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.072050 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.371365 4861 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.380376 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.380466 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.380494 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.380514 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 13:31:49 crc kubenswrapper[4861]: E1003 13:31:49.380579 4861 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 13:31:51.380551342 +0000 UTC m=+25.378536389 (durationBeforeRetry 2s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:31:49 crc kubenswrapper[4861]: E1003 13:31:49.380618 4861 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 03 13:31:49 crc kubenswrapper[4861]: E1003 13:31:49.380632 4861 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.380664 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 13:31:49 crc kubenswrapper[4861]: E1003 13:31:49.380700 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-03 13:31:51.380681565 +0000 UTC m=+25.378666642 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 03 13:31:49 crc kubenswrapper[4861]: E1003 13:31:49.380761 4861 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 03 13:31:49 crc kubenswrapper[4861]: E1003 13:31:49.380784 4861 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 03 13:31:49 crc kubenswrapper[4861]: E1003 13:31:49.380827 4861 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 03 13:31:49 crc kubenswrapper[4861]: E1003 13:31:49.380842 4861 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 13:31:49 crc kubenswrapper[4861]: E1003 13:31:49.380799 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-03 13:31:51.380792198 +0000 UTC m=+25.378777245 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 03 13:31:49 crc kubenswrapper[4861]: E1003 13:31:49.380636 4861 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 03 13:31:49 crc kubenswrapper[4861]: E1003 13:31:49.380931 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-03 13:31:51.380904581 +0000 UTC m=+25.378889628 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 13:31:49 crc kubenswrapper[4861]: E1003 13:31:49.380940 4861 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 13:31:49 crc kubenswrapper[4861]: E1003 13:31:49.380990 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-03 13:31:51.380978393 +0000 UTC m=+25.378963580 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.465204 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-wm76s"] Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.465803 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-jwgvx"] Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.465967 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-wm76s" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.466027 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-jwgvx" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.472332 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.472548 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.472575 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.472580 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.472675 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.473304 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-5twn4"] Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.473399 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.473675 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.476993 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-t9slw"] Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.477264 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.477325 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/node-resolver-c97s6"] Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.477549 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-c97s6" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.477872 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.490864 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.491047 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.491096 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.491526 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.491534 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.491794 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.491912 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.491920 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.492098 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.492167 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.492251 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.492591 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.493500 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.493884 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.497883 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.501029 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:49Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.519141 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:49Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.533720 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wm76s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3bf3157b-44d1-4bb3-b185-71523a80c054\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wm76s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:49Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.549817 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:49Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.562405 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ceeea9a8c61928b935a6c01f2dda3f9bf0036c2c2792c9338cc580a3296285b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:49Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.579069 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"587ecce6-1ef4-4f74-a2ba-bd6e9fdb84dc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d77b51532e1ed4922634cbfc9360ac49276104c2c3ca115ea522ff423cd7bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://853fb69edcd3e4a27929ab2a6081c40f93553967619663805afb7b626f9c1e39\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75079f3e07d277ab11585e34fc72877ba93a8d0aeaa3f0c8bb214c7c14f9c1b1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd18d89b8b81faa6f8232c33ecaec19aafc3d9574a090cbef37bcf26f83a4bf9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://83021bb2c43e49c7ea58c52228b8b97bfe846d23c2dd3eaba432c0ecfea78f33\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T13:31:41Z\\\",\\\"message\\\":\\\"W1003 13:31:29.881099 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1003 
13:31:29.881520 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759498289 cert, and key in /tmp/serving-cert-3863988678/serving-signer.crt, /tmp/serving-cert-3863988678/serving-signer.key\\\\nI1003 13:31:30.211791 1 observer_polling.go:159] Starting file observer\\\\nW1003 13:31:30.215003 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1003 13:31:30.215163 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 13:31:30.217928 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3863988678/tls.crt::/tmp/serving-cert-3863988678/tls.key\\\\\\\"\\\\nF1003 13:31:40.785503 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cd18d89b8b81faa6f8232c33ecaec19aafc3d9574a090cbef37bcf26f83a4bf9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 13:31:47.746138 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 13:31:47.746280 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 13:31:47.747035 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2119377140/tls.crt::/tmp/serving-cert-2119377140/tls.key\\\\\\\"\\\\nI1003 13:31:48.538827 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 13:31:48.544908 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 13:31:48.544935 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 13:31:48.545220 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 13:31:48.545275 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 13:31:48.555911 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1003 13:31:48.555947 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 13:31:48.555953 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 13:31:48.555961 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 13:31:48.555964 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 13:31:48.555968 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 13:31:48.555971 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1003 
13:31:48.556259 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1003 13:31:48.559989 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb1af1cb5b66706cd0a0da5a3f6b2c380a771100e61f84ca2c85c28f1878f7f6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bdf52752b6cec17ac3d33b9c94d08302ff9e7ff88ab9c23590134a36b1384af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bdf52752b6cec17ac3d33b9c94d08302ff9e7ff88ab9c23590134a36b1384af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:49Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.582127 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/3bf3157b-44d1-4bb3-b185-71523a80c054-system-cni-dir\") pod \"multus-additional-cni-plugins-wm76s\" (UID: \"3bf3157b-44d1-4bb3-b185-71523a80c054\") " pod="openshift-multus/multus-additional-cni-plugins-wm76s" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.582162 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/3bf3157b-44d1-4bb3-b185-71523a80c054-os-release\") pod \"multus-additional-cni-plugins-wm76s\" (UID: \"3bf3157b-44d1-4bb3-b185-71523a80c054\") " 
pod="openshift-multus/multus-additional-cni-plugins-wm76s" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.582177 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/f714b7db-082f-4c2c-8239-ba5df6986c13-cni-binary-copy\") pod \"multus-jwgvx\" (UID: \"f714b7db-082f-4c2c-8239-ba5df6986c13\") " pod="openshift-multus/multus-jwgvx" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.582194 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/66ed4999-426b-4615-bfb3-764a3ecc950f-ovnkube-script-lib\") pod \"ovnkube-node-5twn4\" (UID: \"66ed4999-426b-4615-bfb3-764a3ecc950f\") " pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.582211 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/66ed4999-426b-4615-bfb3-764a3ecc950f-host-slash\") pod \"ovnkube-node-5twn4\" (UID: \"66ed4999-426b-4615-bfb3-764a3ecc950f\") " pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.582241 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/e1217d91-8c47-4353-b363-96c9de2cdb56-hosts-file\") pod \"node-resolver-c97s6\" (UID: \"e1217d91-8c47-4353-b363-96c9de2cdb56\") " pod="openshift-dns/node-resolver-c97s6" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.582260 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/f714b7db-082f-4c2c-8239-ba5df6986c13-cnibin\") pod \"multus-jwgvx\" (UID: \"f714b7db-082f-4c2c-8239-ba5df6986c13\") " pod="openshift-multus/multus-jwgvx" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.582329 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/66ed4999-426b-4615-bfb3-764a3ecc950f-ovn-node-metrics-cert\") pod \"ovnkube-node-5twn4\" (UID: \"66ed4999-426b-4615-bfb3-764a3ecc950f\") " pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.582389 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/3bf3157b-44d1-4bb3-b185-71523a80c054-tuning-conf-dir\") pod \"multus-additional-cni-plugins-wm76s\" (UID: \"3bf3157b-44d1-4bb3-b185-71523a80c054\") " pod="openshift-multus/multus-additional-cni-plugins-wm76s" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.582412 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/f714b7db-082f-4c2c-8239-ba5df6986c13-host-var-lib-cni-bin\") pod \"multus-jwgvx\" (UID: \"f714b7db-082f-4c2c-8239-ba5df6986c13\") " pod="openshift-multus/multus-jwgvx" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.582442 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/3bf3157b-44d1-4bb3-b185-71523a80c054-cnibin\") pod \"multus-additional-cni-plugins-wm76s\" (UID: 
\"3bf3157b-44d1-4bb3-b185-71523a80c054\") " pod="openshift-multus/multus-additional-cni-plugins-wm76s" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.582500 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f714b7db-082f-4c2c-8239-ba5df6986c13-etc-kubernetes\") pod \"multus-jwgvx\" (UID: \"f714b7db-082f-4c2c-8239-ba5df6986c13\") " pod="openshift-multus/multus-jwgvx" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.582537 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/66ed4999-426b-4615-bfb3-764a3ecc950f-run-openvswitch\") pod \"ovnkube-node-5twn4\" (UID: \"66ed4999-426b-4615-bfb3-764a3ecc950f\") " pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.582566 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/66ed4999-426b-4615-bfb3-764a3ecc950f-log-socket\") pod \"ovnkube-node-5twn4\" (UID: \"66ed4999-426b-4615-bfb3-764a3ecc950f\") " pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.582586 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/d8335d3f-417e-4114-b306-a3d8f6c31348-mcd-auth-proxy-config\") pod \"machine-config-daemon-t9slw\" (UID: \"d8335d3f-417e-4114-b306-a3d8f6c31348\") " pod="openshift-machine-config-operator/machine-config-daemon-t9slw" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.582626 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/f714b7db-082f-4c2c-8239-ba5df6986c13-multus-socket-dir-parent\") pod \"multus-jwgvx\" (UID: \"f714b7db-082f-4c2c-8239-ba5df6986c13\") " pod="openshift-multus/multus-jwgvx" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.582658 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/f714b7db-082f-4c2c-8239-ba5df6986c13-multus-daemon-config\") pod \"multus-jwgvx\" (UID: \"f714b7db-082f-4c2c-8239-ba5df6986c13\") " pod="openshift-multus/multus-jwgvx" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.582683 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/f714b7db-082f-4c2c-8239-ba5df6986c13-multus-conf-dir\") pod \"multus-jwgvx\" (UID: \"f714b7db-082f-4c2c-8239-ba5df6986c13\") " pod="openshift-multus/multus-jwgvx" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.582703 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/3bf3157b-44d1-4bb3-b185-71523a80c054-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-wm76s\" (UID: \"3bf3157b-44d1-4bb3-b185-71523a80c054\") " pod="openshift-multus/multus-additional-cni-plugins-wm76s" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.582738 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: 
\"kubernetes.io/host-path/66ed4999-426b-4615-bfb3-764a3ecc950f-systemd-units\") pod \"ovnkube-node-5twn4\" (UID: \"66ed4999-426b-4615-bfb3-764a3ecc950f\") " pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.582759 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/66ed4999-426b-4615-bfb3-764a3ecc950f-host-run-ovn-kubernetes\") pod \"ovnkube-node-5twn4\" (UID: \"66ed4999-426b-4615-bfb3-764a3ecc950f\") " pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.582780 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/d8335d3f-417e-4114-b306-a3d8f6c31348-rootfs\") pod \"machine-config-daemon-t9slw\" (UID: \"d8335d3f-417e-4114-b306-a3d8f6c31348\") " pod="openshift-machine-config-operator/machine-config-daemon-t9slw" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.582798 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/3bf3157b-44d1-4bb3-b185-71523a80c054-cni-binary-copy\") pod \"multus-additional-cni-plugins-wm76s\" (UID: \"3bf3157b-44d1-4bb3-b185-71523a80c054\") " pod="openshift-multus/multus-additional-cni-plugins-wm76s" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.582828 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/66ed4999-426b-4615-bfb3-764a3ecc950f-host-run-netns\") pod \"ovnkube-node-5twn4\" (UID: \"66ed4999-426b-4615-bfb3-764a3ecc950f\") " pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.582848 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/66ed4999-426b-4615-bfb3-764a3ecc950f-run-systemd\") pod \"ovnkube-node-5twn4\" (UID: \"66ed4999-426b-4615-bfb3-764a3ecc950f\") " pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.582866 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/66ed4999-426b-4615-bfb3-764a3ecc950f-etc-openvswitch\") pod \"ovnkube-node-5twn4\" (UID: \"66ed4999-426b-4615-bfb3-764a3ecc950f\") " pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.582887 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/66ed4999-426b-4615-bfb3-764a3ecc950f-node-log\") pod \"ovnkube-node-5twn4\" (UID: \"66ed4999-426b-4615-bfb3-764a3ecc950f\") " pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.582914 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/66ed4999-426b-4615-bfb3-764a3ecc950f-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-5twn4\" (UID: \"66ed4999-426b-4615-bfb3-764a3ecc950f\") " pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" Oct 03 13:31:49 crc 
kubenswrapper[4861]: I1003 13:31:49.582966 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/f714b7db-082f-4c2c-8239-ba5df6986c13-multus-cni-dir\") pod \"multus-jwgvx\" (UID: \"f714b7db-082f-4c2c-8239-ba5df6986c13\") " pod="openshift-multus/multus-jwgvx" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.583029 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/f714b7db-082f-4c2c-8239-ba5df6986c13-host-run-k8s-cni-cncf-io\") pod \"multus-jwgvx\" (UID: \"f714b7db-082f-4c2c-8239-ba5df6986c13\") " pod="openshift-multus/multus-jwgvx" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.583056 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nvshn\" (UniqueName: \"kubernetes.io/projected/3bf3157b-44d1-4bb3-b185-71523a80c054-kube-api-access-nvshn\") pod \"multus-additional-cni-plugins-wm76s\" (UID: \"3bf3157b-44d1-4bb3-b185-71523a80c054\") " pod="openshift-multus/multus-additional-cni-plugins-wm76s" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.583076 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/f714b7db-082f-4c2c-8239-ba5df6986c13-host-var-lib-cni-multus\") pod \"multus-jwgvx\" (UID: \"f714b7db-082f-4c2c-8239-ba5df6986c13\") " pod="openshift-multus/multus-jwgvx" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.583100 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/66ed4999-426b-4615-bfb3-764a3ecc950f-host-cni-bin\") pod \"ovnkube-node-5twn4\" (UID: \"66ed4999-426b-4615-bfb3-764a3ecc950f\") " pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.583121 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nwnjl\" (UniqueName: \"kubernetes.io/projected/66ed4999-426b-4615-bfb3-764a3ecc950f-kube-api-access-nwnjl\") pod \"ovnkube-node-5twn4\" (UID: \"66ed4999-426b-4615-bfb3-764a3ecc950f\") " pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.583135 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/f714b7db-082f-4c2c-8239-ba5df6986c13-host-var-lib-kubelet\") pod \"multus-jwgvx\" (UID: \"f714b7db-082f-4c2c-8239-ba5df6986c13\") " pod="openshift-multus/multus-jwgvx" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.583178 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/f714b7db-082f-4c2c-8239-ba5df6986c13-host-run-multus-certs\") pod \"multus-jwgvx\" (UID: \"f714b7db-082f-4c2c-8239-ba5df6986c13\") " pod="openshift-multus/multus-jwgvx" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.583194 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b76qk\" (UniqueName: \"kubernetes.io/projected/f714b7db-082f-4c2c-8239-ba5df6986c13-kube-api-access-b76qk\") pod \"multus-jwgvx\" (UID: 
\"f714b7db-082f-4c2c-8239-ba5df6986c13\") " pod="openshift-multus/multus-jwgvx" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.583210 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/66ed4999-426b-4615-bfb3-764a3ecc950f-host-kubelet\") pod \"ovnkube-node-5twn4\" (UID: \"66ed4999-426b-4615-bfb3-764a3ecc950f\") " pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.583242 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/f714b7db-082f-4c2c-8239-ba5df6986c13-host-run-netns\") pod \"multus-jwgvx\" (UID: \"f714b7db-082f-4c2c-8239-ba5df6986c13\") " pod="openshift-multus/multus-jwgvx" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.583258 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/f714b7db-082f-4c2c-8239-ba5df6986c13-hostroot\") pod \"multus-jwgvx\" (UID: \"f714b7db-082f-4c2c-8239-ba5df6986c13\") " pod="openshift-multus/multus-jwgvx" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.583271 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/66ed4999-426b-4615-bfb3-764a3ecc950f-env-overrides\") pod \"ovnkube-node-5twn4\" (UID: \"66ed4999-426b-4615-bfb3-764a3ecc950f\") " pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.583284 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/66ed4999-426b-4615-bfb3-764a3ecc950f-host-cni-netd\") pod \"ovnkube-node-5twn4\" (UID: \"66ed4999-426b-4615-bfb3-764a3ecc950f\") " pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.583298 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/d8335d3f-417e-4114-b306-a3d8f6c31348-proxy-tls\") pod \"machine-config-daemon-t9slw\" (UID: \"d8335d3f-417e-4114-b306-a3d8f6c31348\") " pod="openshift-machine-config-operator/machine-config-daemon-t9slw" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.583311 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/66ed4999-426b-4615-bfb3-764a3ecc950f-var-lib-openvswitch\") pod \"ovnkube-node-5twn4\" (UID: \"66ed4999-426b-4615-bfb3-764a3ecc950f\") " pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.583324 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/f714b7db-082f-4c2c-8239-ba5df6986c13-os-release\") pod \"multus-jwgvx\" (UID: \"f714b7db-082f-4c2c-8239-ba5df6986c13\") " pod="openshift-multus/multus-jwgvx" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.583370 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6zdw4\" (UniqueName: \"kubernetes.io/projected/e1217d91-8c47-4353-b363-96c9de2cdb56-kube-api-access-6zdw4\") pod 
\"node-resolver-c97s6\" (UID: \"e1217d91-8c47-4353-b363-96c9de2cdb56\") " pod="openshift-dns/node-resolver-c97s6" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.583408 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/f714b7db-082f-4c2c-8239-ba5df6986c13-system-cni-dir\") pod \"multus-jwgvx\" (UID: \"f714b7db-082f-4c2c-8239-ba5df6986c13\") " pod="openshift-multus/multus-jwgvx" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.583425 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/66ed4999-426b-4615-bfb3-764a3ecc950f-ovnkube-config\") pod \"ovnkube-node-5twn4\" (UID: \"66ed4999-426b-4615-bfb3-764a3ecc950f\") " pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.583442 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7prvf\" (UniqueName: \"kubernetes.io/projected/d8335d3f-417e-4114-b306-a3d8f6c31348-kube-api-access-7prvf\") pod \"machine-config-daemon-t9slw\" (UID: \"d8335d3f-417e-4114-b306-a3d8f6c31348\") " pod="openshift-machine-config-operator/machine-config-daemon-t9slw" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.583464 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/66ed4999-426b-4615-bfb3-764a3ecc950f-run-ovn\") pod \"ovnkube-node-5twn4\" (UID: \"66ed4999-426b-4615-bfb3-764a3ecc950f\") " pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.595878 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:49Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.611074 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f330284d8d5446236d2bf739c6df75969c865e304c5adab6b1ec40a92baf30d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63b55320840755d0a9c8296d19550ae1d7f5cb2f17d286dddc10a0202963bd98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:49Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.626381 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jwgvx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f714b7db-082f-4c2c-8239-ba5df6986c13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b76qk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jwgvx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:49Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.649021 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jwgvx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f714b7db-082f-4c2c-8239-ba5df6986c13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b76qk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jwgvx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": 
failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:49Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.680605 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.680645 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.680622 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 13:31:49 crc kubenswrapper[4861]: E1003 13:31:49.680757 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 13:31:49 crc kubenswrapper[4861]: E1003 13:31:49.680826 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 13:31:49 crc kubenswrapper[4861]: E1003 13:31:49.680891 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.683825 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b76qk\" (UniqueName: \"kubernetes.io/projected/f714b7db-082f-4c2c-8239-ba5df6986c13-kube-api-access-b76qk\") pod \"multus-jwgvx\" (UID: \"f714b7db-082f-4c2c-8239-ba5df6986c13\") " pod="openshift-multus/multus-jwgvx" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.683862 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/66ed4999-426b-4615-bfb3-764a3ecc950f-host-kubelet\") pod \"ovnkube-node-5twn4\" (UID: \"66ed4999-426b-4615-bfb3-764a3ecc950f\") " pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.683880 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/66ed4999-426b-4615-bfb3-764a3ecc950f-host-cni-bin\") pod \"ovnkube-node-5twn4\" (UID: \"66ed4999-426b-4615-bfb3-764a3ecc950f\") " pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.683897 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nwnjl\" (UniqueName: \"kubernetes.io/projected/66ed4999-426b-4615-bfb3-764a3ecc950f-kube-api-access-nwnjl\") pod \"ovnkube-node-5twn4\" (UID: \"66ed4999-426b-4615-bfb3-764a3ecc950f\") " pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.683912 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/f714b7db-082f-4c2c-8239-ba5df6986c13-host-var-lib-kubelet\") pod \"multus-jwgvx\" (UID: \"f714b7db-082f-4c2c-8239-ba5df6986c13\") " pod="openshift-multus/multus-jwgvx" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.683929 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/f714b7db-082f-4c2c-8239-ba5df6986c13-host-run-multus-certs\") pod \"multus-jwgvx\" (UID: \"f714b7db-082f-4c2c-8239-ba5df6986c13\") " pod="openshift-multus/multus-jwgvx" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.683970 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/66ed4999-426b-4615-bfb3-764a3ecc950f-host-kubelet\") pod \"ovnkube-node-5twn4\" (UID: \"66ed4999-426b-4615-bfb3-764a3ecc950f\") " pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.684013 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/f714b7db-082f-4c2c-8239-ba5df6986c13-host-var-lib-kubelet\") pod \"multus-jwgvx\" (UID: \"f714b7db-082f-4c2c-8239-ba5df6986c13\") " pod="openshift-multus/multus-jwgvx" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.683983 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/66ed4999-426b-4615-bfb3-764a3ecc950f-host-cni-bin\") pod \"ovnkube-node-5twn4\" (UID: \"66ed4999-426b-4615-bfb3-764a3ecc950f\") " pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" Oct 03 13:31:49 crc 
kubenswrapper[4861]: I1003 13:31:49.684114 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/f714b7db-082f-4c2c-8239-ba5df6986c13-host-run-multus-certs\") pod \"multus-jwgvx\" (UID: \"f714b7db-082f-4c2c-8239-ba5df6986c13\") " pod="openshift-multus/multus-jwgvx" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.684247 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/66ed4999-426b-4615-bfb3-764a3ecc950f-env-overrides\") pod \"ovnkube-node-5twn4\" (UID: \"66ed4999-426b-4615-bfb3-764a3ecc950f\") " pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.684267 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/f714b7db-082f-4c2c-8239-ba5df6986c13-host-run-netns\") pod \"multus-jwgvx\" (UID: \"f714b7db-082f-4c2c-8239-ba5df6986c13\") " pod="openshift-multus/multus-jwgvx" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.684282 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/f714b7db-082f-4c2c-8239-ba5df6986c13-hostroot\") pod \"multus-jwgvx\" (UID: \"f714b7db-082f-4c2c-8239-ba5df6986c13\") " pod="openshift-multus/multus-jwgvx" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.684299 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/66ed4999-426b-4615-bfb3-764a3ecc950f-var-lib-openvswitch\") pod \"ovnkube-node-5twn4\" (UID: \"66ed4999-426b-4615-bfb3-764a3ecc950f\") " pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.684345 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/f714b7db-082f-4c2c-8239-ba5df6986c13-hostroot\") pod \"multus-jwgvx\" (UID: \"f714b7db-082f-4c2c-8239-ba5df6986c13\") " pod="openshift-multus/multus-jwgvx" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.684344 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/f714b7db-082f-4c2c-8239-ba5df6986c13-host-run-netns\") pod \"multus-jwgvx\" (UID: \"f714b7db-082f-4c2c-8239-ba5df6986c13\") " pod="openshift-multus/multus-jwgvx" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.684381 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/66ed4999-426b-4615-bfb3-764a3ecc950f-host-cni-netd\") pod \"ovnkube-node-5twn4\" (UID: \"66ed4999-426b-4615-bfb3-764a3ecc950f\") " pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.684401 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/d8335d3f-417e-4114-b306-a3d8f6c31348-proxy-tls\") pod \"machine-config-daemon-t9slw\" (UID: \"d8335d3f-417e-4114-b306-a3d8f6c31348\") " pod="openshift-machine-config-operator/machine-config-daemon-t9slw" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.684405 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: 
\"kubernetes.io/host-path/66ed4999-426b-4615-bfb3-764a3ecc950f-var-lib-openvswitch\") pod \"ovnkube-node-5twn4\" (UID: \"66ed4999-426b-4615-bfb3-764a3ecc950f\") " pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.684416 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/f714b7db-082f-4c2c-8239-ba5df6986c13-os-release\") pod \"multus-jwgvx\" (UID: \"f714b7db-082f-4c2c-8239-ba5df6986c13\") " pod="openshift-multus/multus-jwgvx" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.684445 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/66ed4999-426b-4615-bfb3-764a3ecc950f-host-cni-netd\") pod \"ovnkube-node-5twn4\" (UID: \"66ed4999-426b-4615-bfb3-764a3ecc950f\") " pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.684450 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6zdw4\" (UniqueName: \"kubernetes.io/projected/e1217d91-8c47-4353-b363-96c9de2cdb56-kube-api-access-6zdw4\") pod \"node-resolver-c97s6\" (UID: \"e1217d91-8c47-4353-b363-96c9de2cdb56\") " pod="openshift-dns/node-resolver-c97s6" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.684504 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/f714b7db-082f-4c2c-8239-ba5df6986c13-system-cni-dir\") pod \"multus-jwgvx\" (UID: \"f714b7db-082f-4c2c-8239-ba5df6986c13\") " pod="openshift-multus/multus-jwgvx" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.684538 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/66ed4999-426b-4615-bfb3-764a3ecc950f-run-ovn\") pod \"ovnkube-node-5twn4\" (UID: \"66ed4999-426b-4615-bfb3-764a3ecc950f\") " pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.684559 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/66ed4999-426b-4615-bfb3-764a3ecc950f-ovnkube-config\") pod \"ovnkube-node-5twn4\" (UID: \"66ed4999-426b-4615-bfb3-764a3ecc950f\") " pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.684580 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7prvf\" (UniqueName: \"kubernetes.io/projected/d8335d3f-417e-4114-b306-a3d8f6c31348-kube-api-access-7prvf\") pod \"machine-config-daemon-t9slw\" (UID: \"d8335d3f-417e-4114-b306-a3d8f6c31348\") " pod="openshift-machine-config-operator/machine-config-daemon-t9slw" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.684608 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/66ed4999-426b-4615-bfb3-764a3ecc950f-ovnkube-script-lib\") pod \"ovnkube-node-5twn4\" (UID: \"66ed4999-426b-4615-bfb3-764a3ecc950f\") " pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.684631 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/3bf3157b-44d1-4bb3-b185-71523a80c054-system-cni-dir\") pod 
\"multus-additional-cni-plugins-wm76s\" (UID: \"3bf3157b-44d1-4bb3-b185-71523a80c054\") " pod="openshift-multus/multus-additional-cni-plugins-wm76s" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.684651 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/3bf3157b-44d1-4bb3-b185-71523a80c054-os-release\") pod \"multus-additional-cni-plugins-wm76s\" (UID: \"3bf3157b-44d1-4bb3-b185-71523a80c054\") " pod="openshift-multus/multus-additional-cni-plugins-wm76s" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.684671 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/f714b7db-082f-4c2c-8239-ba5df6986c13-cni-binary-copy\") pod \"multus-jwgvx\" (UID: \"f714b7db-082f-4c2c-8239-ba5df6986c13\") " pod="openshift-multus/multus-jwgvx" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.684696 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/66ed4999-426b-4615-bfb3-764a3ecc950f-host-slash\") pod \"ovnkube-node-5twn4\" (UID: \"66ed4999-426b-4615-bfb3-764a3ecc950f\") " pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.684718 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/e1217d91-8c47-4353-b363-96c9de2cdb56-hosts-file\") pod \"node-resolver-c97s6\" (UID: \"e1217d91-8c47-4353-b363-96c9de2cdb56\") " pod="openshift-dns/node-resolver-c97s6" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.684732 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/f714b7db-082f-4c2c-8239-ba5df6986c13-os-release\") pod \"multus-jwgvx\" (UID: \"f714b7db-082f-4c2c-8239-ba5df6986c13\") " pod="openshift-multus/multus-jwgvx" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.684737 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/f714b7db-082f-4c2c-8239-ba5df6986c13-cnibin\") pod \"multus-jwgvx\" (UID: \"f714b7db-082f-4c2c-8239-ba5df6986c13\") " pod="openshift-multus/multus-jwgvx" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.684764 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/66ed4999-426b-4615-bfb3-764a3ecc950f-ovn-node-metrics-cert\") pod \"ovnkube-node-5twn4\" (UID: \"66ed4999-426b-4615-bfb3-764a3ecc950f\") " pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.684790 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/3bf3157b-44d1-4bb3-b185-71523a80c054-tuning-conf-dir\") pod \"multus-additional-cni-plugins-wm76s\" (UID: \"3bf3157b-44d1-4bb3-b185-71523a80c054\") " pod="openshift-multus/multus-additional-cni-plugins-wm76s" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.684809 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/f714b7db-082f-4c2c-8239-ba5df6986c13-host-var-lib-cni-bin\") pod \"multus-jwgvx\" (UID: \"f714b7db-082f-4c2c-8239-ba5df6986c13\") " pod="openshift-multus/multus-jwgvx" Oct 03 13:31:49 
crc kubenswrapper[4861]: I1003 13:31:49.684816 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/66ed4999-426b-4615-bfb3-764a3ecc950f-env-overrides\") pod \"ovnkube-node-5twn4\" (UID: \"66ed4999-426b-4615-bfb3-764a3ecc950f\") " pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.684837 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/3bf3157b-44d1-4bb3-b185-71523a80c054-cnibin\") pod \"multus-additional-cni-plugins-wm76s\" (UID: \"3bf3157b-44d1-4bb3-b185-71523a80c054\") " pod="openshift-multus/multus-additional-cni-plugins-wm76s" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.684854 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f714b7db-082f-4c2c-8239-ba5df6986c13-etc-kubernetes\") pod \"multus-jwgvx\" (UID: \"f714b7db-082f-4c2c-8239-ba5df6986c13\") " pod="openshift-multus/multus-jwgvx" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.684874 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/f714b7db-082f-4c2c-8239-ba5df6986c13-multus-daemon-config\") pod \"multus-jwgvx\" (UID: \"f714b7db-082f-4c2c-8239-ba5df6986c13\") " pod="openshift-multus/multus-jwgvx" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.684883 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/f714b7db-082f-4c2c-8239-ba5df6986c13-system-cni-dir\") pod \"multus-jwgvx\" (UID: \"f714b7db-082f-4c2c-8239-ba5df6986c13\") " pod="openshift-multus/multus-jwgvx" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.684893 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/66ed4999-426b-4615-bfb3-764a3ecc950f-run-openvswitch\") pod \"ovnkube-node-5twn4\" (UID: \"66ed4999-426b-4615-bfb3-764a3ecc950f\") " pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.684910 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/66ed4999-426b-4615-bfb3-764a3ecc950f-log-socket\") pod \"ovnkube-node-5twn4\" (UID: \"66ed4999-426b-4615-bfb3-764a3ecc950f\") " pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.684925 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/d8335d3f-417e-4114-b306-a3d8f6c31348-mcd-auth-proxy-config\") pod \"machine-config-daemon-t9slw\" (UID: \"d8335d3f-417e-4114-b306-a3d8f6c31348\") " pod="openshift-machine-config-operator/machine-config-daemon-t9slw" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.684941 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/f714b7db-082f-4c2c-8239-ba5df6986c13-multus-socket-dir-parent\") pod \"multus-jwgvx\" (UID: \"f714b7db-082f-4c2c-8239-ba5df6986c13\") " pod="openshift-multus/multus-jwgvx" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.684958 4861 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/3bf3157b-44d1-4bb3-b185-71523a80c054-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-wm76s\" (UID: \"3bf3157b-44d1-4bb3-b185-71523a80c054\") " pod="openshift-multus/multus-additional-cni-plugins-wm76s" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.684976 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/f714b7db-082f-4c2c-8239-ba5df6986c13-multus-conf-dir\") pod \"multus-jwgvx\" (UID: \"f714b7db-082f-4c2c-8239-ba5df6986c13\") " pod="openshift-multus/multus-jwgvx" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.684995 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/3bf3157b-44d1-4bb3-b185-71523a80c054-cni-binary-copy\") pod \"multus-additional-cni-plugins-wm76s\" (UID: \"3bf3157b-44d1-4bb3-b185-71523a80c054\") " pod="openshift-multus/multus-additional-cni-plugins-wm76s" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.685028 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/66ed4999-426b-4615-bfb3-764a3ecc950f-systemd-units\") pod \"ovnkube-node-5twn4\" (UID: \"66ed4999-426b-4615-bfb3-764a3ecc950f\") " pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.685047 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/66ed4999-426b-4615-bfb3-764a3ecc950f-host-run-ovn-kubernetes\") pod \"ovnkube-node-5twn4\" (UID: \"66ed4999-426b-4615-bfb3-764a3ecc950f\") " pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.685068 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/d8335d3f-417e-4114-b306-a3d8f6c31348-rootfs\") pod \"machine-config-daemon-t9slw\" (UID: \"d8335d3f-417e-4114-b306-a3d8f6c31348\") " pod="openshift-machine-config-operator/machine-config-daemon-t9slw" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.685086 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/66ed4999-426b-4615-bfb3-764a3ecc950f-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-5twn4\" (UID: \"66ed4999-426b-4615-bfb3-764a3ecc950f\") " pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.685103 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/f714b7db-082f-4c2c-8239-ba5df6986c13-multus-cni-dir\") pod \"multus-jwgvx\" (UID: \"f714b7db-082f-4c2c-8239-ba5df6986c13\") " pod="openshift-multus/multus-jwgvx" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.685146 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/66ed4999-426b-4615-bfb3-764a3ecc950f-host-run-netns\") pod \"ovnkube-node-5twn4\" (UID: \"66ed4999-426b-4615-bfb3-764a3ecc950f\") " pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.685161 4861 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/66ed4999-426b-4615-bfb3-764a3ecc950f-run-systemd\") pod \"ovnkube-node-5twn4\" (UID: \"66ed4999-426b-4615-bfb3-764a3ecc950f\") " pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.685177 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/66ed4999-426b-4615-bfb3-764a3ecc950f-etc-openvswitch\") pod \"ovnkube-node-5twn4\" (UID: \"66ed4999-426b-4615-bfb3-764a3ecc950f\") " pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.685195 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/66ed4999-426b-4615-bfb3-764a3ecc950f-node-log\") pod \"ovnkube-node-5twn4\" (UID: \"66ed4999-426b-4615-bfb3-764a3ecc950f\") " pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.685224 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/f714b7db-082f-4c2c-8239-ba5df6986c13-host-run-k8s-cni-cncf-io\") pod \"multus-jwgvx\" (UID: \"f714b7db-082f-4c2c-8239-ba5df6986c13\") " pod="openshift-multus/multus-jwgvx" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.685272 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nvshn\" (UniqueName: \"kubernetes.io/projected/3bf3157b-44d1-4bb3-b185-71523a80c054-kube-api-access-nvshn\") pod \"multus-additional-cni-plugins-wm76s\" (UID: \"3bf3157b-44d1-4bb3-b185-71523a80c054\") " pod="openshift-multus/multus-additional-cni-plugins-wm76s" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.685280 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/66ed4999-426b-4615-bfb3-764a3ecc950f-ovnkube-script-lib\") pod \"ovnkube-node-5twn4\" (UID: \"66ed4999-426b-4615-bfb3-764a3ecc950f\") " pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.685294 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/f714b7db-082f-4c2c-8239-ba5df6986c13-host-var-lib-cni-multus\") pod \"multus-jwgvx\" (UID: \"f714b7db-082f-4c2c-8239-ba5df6986c13\") " pod="openshift-multus/multus-jwgvx" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.685329 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/f714b7db-082f-4c2c-8239-ba5df6986c13-multus-conf-dir\") pod \"multus-jwgvx\" (UID: \"f714b7db-082f-4c2c-8239-ba5df6986c13\") " pod="openshift-multus/multus-jwgvx" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.685364 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/f714b7db-082f-4c2c-8239-ba5df6986c13-host-var-lib-cni-multus\") pod \"multus-jwgvx\" (UID: \"f714b7db-082f-4c2c-8239-ba5df6986c13\") " pod="openshift-multus/multus-jwgvx" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.685393 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: 
\"kubernetes.io/host-path/66ed4999-426b-4615-bfb3-764a3ecc950f-host-run-netns\") pod \"ovnkube-node-5twn4\" (UID: \"66ed4999-426b-4615-bfb3-764a3ecc950f\") " pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.685418 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/66ed4999-426b-4615-bfb3-764a3ecc950f-run-systemd\") pod \"ovnkube-node-5twn4\" (UID: \"66ed4999-426b-4615-bfb3-764a3ecc950f\") " pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.685445 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/66ed4999-426b-4615-bfb3-764a3ecc950f-etc-openvswitch\") pod \"ovnkube-node-5twn4\" (UID: \"66ed4999-426b-4615-bfb3-764a3ecc950f\") " pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.685474 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/66ed4999-426b-4615-bfb3-764a3ecc950f-node-log\") pod \"ovnkube-node-5twn4\" (UID: \"66ed4999-426b-4615-bfb3-764a3ecc950f\") " pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.685497 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/f714b7db-082f-4c2c-8239-ba5df6986c13-host-run-k8s-cni-cncf-io\") pod \"multus-jwgvx\" (UID: \"f714b7db-082f-4c2c-8239-ba5df6986c13\") " pod="openshift-multus/multus-jwgvx" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.685536 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/f714b7db-082f-4c2c-8239-ba5df6986c13-multus-cni-dir\") pod \"multus-jwgvx\" (UID: \"f714b7db-082f-4c2c-8239-ba5df6986c13\") " pod="openshift-multus/multus-jwgvx" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.685705 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/66ed4999-426b-4615-bfb3-764a3ecc950f-host-run-ovn-kubernetes\") pod \"ovnkube-node-5twn4\" (UID: \"66ed4999-426b-4615-bfb3-764a3ecc950f\") " pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.685739 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/66ed4999-426b-4615-bfb3-764a3ecc950f-systemd-units\") pod \"ovnkube-node-5twn4\" (UID: \"66ed4999-426b-4615-bfb3-764a3ecc950f\") " pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.685778 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/66ed4999-426b-4615-bfb3-764a3ecc950f-ovnkube-config\") pod \"ovnkube-node-5twn4\" (UID: \"66ed4999-426b-4615-bfb3-764a3ecc950f\") " pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.685920 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/f714b7db-082f-4c2c-8239-ba5df6986c13-multus-daemon-config\") pod \"multus-jwgvx\" (UID: \"f714b7db-082f-4c2c-8239-ba5df6986c13\") " 
pod="openshift-multus/multus-jwgvx" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.685960 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/f714b7db-082f-4c2c-8239-ba5df6986c13-host-var-lib-cni-bin\") pod \"multus-jwgvx\" (UID: \"f714b7db-082f-4c2c-8239-ba5df6986c13\") " pod="openshift-multus/multus-jwgvx" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.685983 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/3bf3157b-44d1-4bb3-b185-71523a80c054-cnibin\") pod \"multus-additional-cni-plugins-wm76s\" (UID: \"3bf3157b-44d1-4bb3-b185-71523a80c054\") " pod="openshift-multus/multus-additional-cni-plugins-wm76s" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.686001 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f714b7db-082f-4c2c-8239-ba5df6986c13-etc-kubernetes\") pod \"multus-jwgvx\" (UID: \"f714b7db-082f-4c2c-8239-ba5df6986c13\") " pod="openshift-multus/multus-jwgvx" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.684856 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/66ed4999-426b-4615-bfb3-764a3ecc950f-run-ovn\") pod \"ovnkube-node-5twn4\" (UID: \"66ed4999-426b-4615-bfb3-764a3ecc950f\") " pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.686043 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/f714b7db-082f-4c2c-8239-ba5df6986c13-multus-socket-dir-parent\") pod \"multus-jwgvx\" (UID: \"f714b7db-082f-4c2c-8239-ba5df6986c13\") " pod="openshift-multus/multus-jwgvx" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.686065 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/66ed4999-426b-4615-bfb3-764a3ecc950f-run-openvswitch\") pod \"ovnkube-node-5twn4\" (UID: \"66ed4999-426b-4615-bfb3-764a3ecc950f\") " pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.686086 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/3bf3157b-44d1-4bb3-b185-71523a80c054-system-cni-dir\") pod \"multus-additional-cni-plugins-wm76s\" (UID: \"3bf3157b-44d1-4bb3-b185-71523a80c054\") " pod="openshift-multus/multus-additional-cni-plugins-wm76s" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.686143 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/3bf3157b-44d1-4bb3-b185-71523a80c054-tuning-conf-dir\") pod \"multus-additional-cni-plugins-wm76s\" (UID: \"3bf3157b-44d1-4bb3-b185-71523a80c054\") " pod="openshift-multus/multus-additional-cni-plugins-wm76s" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.686163 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/3bf3157b-44d1-4bb3-b185-71523a80c054-os-release\") pod \"multus-additional-cni-plugins-wm76s\" (UID: \"3bf3157b-44d1-4bb3-b185-71523a80c054\") " pod="openshift-multus/multus-additional-cni-plugins-wm76s" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.686189 4861 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/d8335d3f-417e-4114-b306-a3d8f6c31348-rootfs\") pod \"machine-config-daemon-t9slw\" (UID: \"d8335d3f-417e-4114-b306-a3d8f6c31348\") " pod="openshift-machine-config-operator/machine-config-daemon-t9slw" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.686211 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/66ed4999-426b-4615-bfb3-764a3ecc950f-log-socket\") pod \"ovnkube-node-5twn4\" (UID: \"66ed4999-426b-4615-bfb3-764a3ecc950f\") " pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.686257 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/e1217d91-8c47-4353-b363-96c9de2cdb56-hosts-file\") pod \"node-resolver-c97s6\" (UID: \"e1217d91-8c47-4353-b363-96c9de2cdb56\") " pod="openshift-dns/node-resolver-c97s6" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.686280 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/66ed4999-426b-4615-bfb3-764a3ecc950f-host-slash\") pod \"ovnkube-node-5twn4\" (UID: \"66ed4999-426b-4615-bfb3-764a3ecc950f\") " pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.686307 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/f714b7db-082f-4c2c-8239-ba5df6986c13-cni-binary-copy\") pod \"multus-jwgvx\" (UID: \"f714b7db-082f-4c2c-8239-ba5df6986c13\") " pod="openshift-multus/multus-jwgvx" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.686309 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/f714b7db-082f-4c2c-8239-ba5df6986c13-cnibin\") pod \"multus-jwgvx\" (UID: \"f714b7db-082f-4c2c-8239-ba5df6986c13\") " pod="openshift-multus/multus-jwgvx" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.686330 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/66ed4999-426b-4615-bfb3-764a3ecc950f-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-5twn4\" (UID: \"66ed4999-426b-4615-bfb3-764a3ecc950f\") " pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.686366 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/3bf3157b-44d1-4bb3-b185-71523a80c054-cni-binary-copy\") pod \"multus-additional-cni-plugins-wm76s\" (UID: \"3bf3157b-44d1-4bb3-b185-71523a80c054\") " pod="openshift-multus/multus-additional-cni-plugins-wm76s" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.686504 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/d8335d3f-417e-4114-b306-a3d8f6c31348-mcd-auth-proxy-config\") pod \"machine-config-daemon-t9slw\" (UID: \"d8335d3f-417e-4114-b306-a3d8f6c31348\") " pod="openshift-machine-config-operator/machine-config-daemon-t9slw" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.686633 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: 
\"kubernetes.io/configmap/3bf3157b-44d1-4bb3-b185-71523a80c054-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-wm76s\" (UID: \"3bf3157b-44d1-4bb3-b185-71523a80c054\") " pod="openshift-multus/multus-additional-cni-plugins-wm76s" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.688541 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/d8335d3f-417e-4114-b306-a3d8f6c31348-proxy-tls\") pod \"machine-config-daemon-t9slw\" (UID: \"d8335d3f-417e-4114-b306-a3d8f6c31348\") " pod="openshift-machine-config-operator/machine-config-daemon-t9slw" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.692716 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/66ed4999-426b-4615-bfb3-764a3ecc950f-ovn-node-metrics-cert\") pod \"ovnkube-node-5twn4\" (UID: \"66ed4999-426b-4615-bfb3-764a3ecc950f\") " pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.705009 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d8335d3f-417e-4114-b306-a3d8f6c31348\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7prvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7prvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-t9slw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:49Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.715737 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6zdw4\" (UniqueName: \"kubernetes.io/projected/e1217d91-8c47-4353-b363-96c9de2cdb56-kube-api-access-6zdw4\") pod \"node-resolver-c97s6\" (UID: \"e1217d91-8c47-4353-b363-96c9de2cdb56\") " pod="openshift-dns/node-resolver-c97s6" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.717779 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b76qk\" (UniqueName: \"kubernetes.io/projected/f714b7db-082f-4c2c-8239-ba5df6986c13-kube-api-access-b76qk\") pod \"multus-jwgvx\" (UID: \"f714b7db-082f-4c2c-8239-ba5df6986c13\") " pod="openshift-multus/multus-jwgvx" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.720818 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nwnjl\" (UniqueName: \"kubernetes.io/projected/66ed4999-426b-4615-bfb3-764a3ecc950f-kube-api-access-nwnjl\") pod \"ovnkube-node-5twn4\" (UID: \"66ed4999-426b-4615-bfb3-764a3ecc950f\") " pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.727754 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:49Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.732350 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nvshn\" (UniqueName: \"kubernetes.io/projected/3bf3157b-44d1-4bb3-b185-71523a80c054-kube-api-access-nvshn\") pod \"multus-additional-cni-plugins-wm76s\" (UID: \"3bf3157b-44d1-4bb3-b185-71523a80c054\") " pod="openshift-multus/multus-additional-cni-plugins-wm76s" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.738547 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7prvf\" (UniqueName: \"kubernetes.io/projected/d8335d3f-417e-4114-b306-a3d8f6c31348-kube-api-access-7prvf\") pod \"machine-config-daemon-t9slw\" (UID: \"d8335d3f-417e-4114-b306-a3d8f6c31348\") " pod="openshift-machine-config-operator/machine-config-daemon-t9slw" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.757544 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f330284d8d5446236d2bf739c6df75969c865e304c5adab6b1ec40a92baf30d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63b55320840755d0a9c8296d19550ae1d7f5cb2f17d286dddc10a0202963bd98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:49Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.785924 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-wm76s" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.786947 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-c97s6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e1217d91-8c47-4353-b363-96c9de2cdb56\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6zdw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-c97s6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:49Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.800110 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-jwgvx" Oct 03 13:31:49 crc kubenswrapper[4861]: W1003 13:31:49.807066 4861 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3bf3157b_44d1_4bb3_b185_71523a80c054.slice/crio-80efc0291b9f7655871f0eff781c293f811e70b1876d75221cfe8bc68a03c0e2 WatchSource:0}: Error finding container 80efc0291b9f7655871f0eff781c293f811e70b1876d75221cfe8bc68a03c0e2: Status 404 returned error can't find the container with id 80efc0291b9f7655871f0eff781c293f811e70b1876d75221cfe8bc68a03c0e2 Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.809341 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" Oct 03 13:31:49 crc kubenswrapper[4861]: W1003 13:31:49.810326 4861 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf714b7db_082f_4c2c_8239_ba5df6986c13.slice/crio-42c9924c04f13d6341c9f6e5948d5859c2dac8cb09f841014424d1395f35b344 WatchSource:0}: Error finding container 42c9924c04f13d6341c9f6e5948d5859c2dac8cb09f841014424d1395f35b344: Status 404 returned error can't find the container with id 42c9924c04f13d6341c9f6e5948d5859c2dac8cb09f841014424d1395f35b344 Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.817922 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-c97s6" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.820341 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.825043 4861 scope.go:117] "RemoveContainer" containerID="cd18d89b8b81faa6f8232c33ecaec19aafc3d9574a090cbef37bcf26f83a4bf9" Oct 03 13:31:49 crc kubenswrapper[4861]: E1003 13:31:49.825286 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.825579 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.831291 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wm76s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3bf3157b-44d1-4bb3-b185-71523a80c054\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\
\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8cc
f4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wm76s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:49Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.832464 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-jwgvx" event={"ID":"f714b7db-082f-4c2c-8239-ba5df6986c13","Type":"ContainerStarted","Data":"42c9924c04f13d6341c9f6e5948d5859c2dac8cb09f841014424d1395f35b344"} Oct 03 13:31:49 crc kubenswrapper[4861]: W1003 13:31:49.848562 4861 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode1217d91_8c47_4353_b363_96c9de2cdb56.slice/crio-e2cf3f83b2f06fcc6b109e8ffe076909dcbe0643e2c474aa7e808ed23b706fcf WatchSource:0}: Error finding container e2cf3f83b2f06fcc6b109e8ffe076909dcbe0643e2c474aa7e808ed23b706fcf: Status 404 returned error can't find the container with id e2cf3f83b2f06fcc6b109e8ffe076909dcbe0643e2c474aa7e808ed23b706fcf Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.854265 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"587ecce6-1ef4-4f74-a2ba-bd6e9fdb84dc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d77b51532e1ed4922634cbfc9360ac49276104c2c3ca115ea522ff423cd7bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://853fb69edcd3e4a27929ab2a6081c40f93553967619663805afb7b626f9c1e39\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75079f3e07d277ab11585e34fc72877ba93a8d0aeaa3f0c8bb214c7c14f9c1b1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd18d89b8b81faa6f8232c33ecaec19aafc3d9574a090cbef37bcf26f83a4bf9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://83021bb2c43e49c7ea58c52228b8b97bfe846d23c2dd3eaba432c0ecfea78f33\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T13:31:41Z\\\",\\\"message\\\":\\\"W1003 13:31:29.881099 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1003 
13:31:29.881520 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759498289 cert, and key in /tmp/serving-cert-3863988678/serving-signer.crt, /tmp/serving-cert-3863988678/serving-signer.key\\\\nI1003 13:31:30.211791 1 observer_polling.go:159] Starting file observer\\\\nW1003 13:31:30.215003 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1003 13:31:30.215163 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 13:31:30.217928 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3863988678/tls.crt::/tmp/serving-cert-3863988678/tls.key\\\\\\\"\\\\nF1003 13:31:40.785503 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cd18d89b8b81faa6f8232c33ecaec19aafc3d9574a090cbef37bcf26f83a4bf9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 13:31:47.746138 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 13:31:47.746280 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 13:31:47.747035 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2119377140/tls.crt::/tmp/serving-cert-2119377140/tls.key\\\\\\\"\\\\nI1003 13:31:48.538827 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 13:31:48.544908 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 13:31:48.544935 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 13:31:48.545220 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 13:31:48.545275 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 13:31:48.555911 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1003 13:31:48.555947 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 13:31:48.555953 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 13:31:48.555961 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 13:31:48.555964 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 13:31:48.555968 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 13:31:48.555971 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1003 
13:31:48.556259 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1003 13:31:48.559989 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb1af1cb5b66706cd0a0da5a3f6b2c380a771100e61f84ca2c85c28f1878f7f6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bdf52752b6cec17ac3d33b9c94d08302ff9e7ff88ab9c23590134a36b1384af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bdf52752b6cec17ac3d33b9c94d08302ff9e7ff88ab9c23590134a36b1384af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:49Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:49 crc kubenswrapper[4861]: W1003 13:31:49.855795 4861 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd8335d3f_417e_4114_b306_a3d8f6c31348.slice/crio-5d3cf5d4e680f7e1914df1ab4164347417ba36a83d157374b06c3ae7d45e3be5 WatchSource:0}: Error finding container 5d3cf5d4e680f7e1914df1ab4164347417ba36a83d157374b06c3ae7d45e3be5: Status 404 returned error can't find the container with id 5d3cf5d4e680f7e1914df1ab4164347417ba36a83d157374b06c3ae7d45e3be5 Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.870672 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:49Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.890718 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ceeea9a8c61928b935a6c01f2dda3f9bf0036c2c2792c9338cc580a3296285b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:49Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.910256 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:49Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.932511 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:49Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.959694 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66ed4999-426b-4615-bfb3-764a3ecc950f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5twn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:49Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:49 crc kubenswrapper[4861]: I1003 13:31:49.982927 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:49Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:50 crc kubenswrapper[4861]: I1003 13:31:50.002448 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ceeea9a8c61928b935a6c01f2dda3f9bf0036c2c2792c9338cc580a3296285b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:50Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:50 crc kubenswrapper[4861]: I1003 13:31:50.024092 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:50Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:50 crc kubenswrapper[4861]: I1003 13:31:50.042698 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:50Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:50 crc kubenswrapper[4861]: I1003 13:31:50.066148 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66ed4999-426b-4615-bfb3-764a3ecc950f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5twn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:50Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:50 crc kubenswrapper[4861]: I1003 13:31:50.084425 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f330284d8d5446236d2bf739c6df75969c865e304c5adab6b1ec40a92baf30d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63b55320840755d0a9c8296d19550ae1d7f5cb2f17d286dddc10a0202963bd98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:50Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:50 crc kubenswrapper[4861]: I1003 13:31:50.101029 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jwgvx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f714b7db-082f-4c2c-8239-ba5df6986c13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b76qk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jwgvx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": 
failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:50Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:50 crc kubenswrapper[4861]: I1003 13:31:50.115272 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d8335d3f-417e-4114-b306-a3d8f6c31348\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7prvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7prvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-t9slw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: 
failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:50Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:50 crc kubenswrapper[4861]: I1003 13:31:50.135643 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:50Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:50 crc kubenswrapper[4861]: I1003 13:31:50.154827 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wm76s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3bf3157b-44d1-4bb3-b185-71523a80c054\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plu
gin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wm76s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-10-03T13:31:50Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:50 crc kubenswrapper[4861]: I1003 13:31:50.171412 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-c97s6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e1217d91-8c47-4353-b363-96c9de2cdb56\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6zdw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-c97s6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:50Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:50 crc kubenswrapper[4861]: I1003 13:31:50.187317 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"587ecce6-1ef4-4f74-a2ba-bd6e9fdb84dc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d77b51532e1ed4922634cbfc9360ac49276104c2c3ca115ea522ff423cd7bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://853fb69edcd3e4a27929ab2a6081c40f93553967619663805afb7b626f9c1e39\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75079f3e07d277ab11585e34fc72877ba93a8d0aeaa3f0c8bb214c7c14f9c1b1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd18d89b8b81faa6f8232c33ecaec19aafc3d9574a090cbef37bcf26f83a4bf9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://cd18d89b8b81faa6f8232c33ecaec19aafc3d9574a090cbef37bcf26f83a4bf9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 13:31:47.746138 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 13:31:47.746280 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 13:31:47.747035 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2119377140/tls.crt::/tmp/serving-cert-2119377140/tls.key\\\\\\\"\\\\nI1003 13:31:48.538827 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 13:31:48.544908 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 13:31:48.544935 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 13:31:48.545220 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 13:31:48.545275 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 13:31:48.555911 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1003 13:31:48.555947 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 13:31:48.555953 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 13:31:48.555961 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 13:31:48.555964 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 13:31:48.555968 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 13:31:48.555971 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1003 13:31:48.556259 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1003 13:31:48.559989 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb1af1cb5b66706cd0a0da5a3f6b2c380a771100e61f84ca2c85c28f1878f7f6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bdf52752b6cec17ac3d33b9c94d08302ff9e7ff88ab9c23590134a36b1384af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bdf52752b6cec17ac3d33b9c94d08302ff9e7ff88ab9c23590134a36b1384af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:50Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:50 crc kubenswrapper[4861]: I1003 13:31:50.836461 4861 generic.go:334] "Generic (PLEG): container finished" podID="66ed4999-426b-4615-bfb3-764a3ecc950f" containerID="85920cc6705b475966ceb17c3776353ec6cd31730aab339b4e5469034c49264e" exitCode=0 Oct 03 13:31:50 crc kubenswrapper[4861]: I1003 13:31:50.836535 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" event={"ID":"66ed4999-426b-4615-bfb3-764a3ecc950f","Type":"ContainerDied","Data":"85920cc6705b475966ceb17c3776353ec6cd31730aab339b4e5469034c49264e"} Oct 03 13:31:50 crc kubenswrapper[4861]: I1003 13:31:50.836657 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" event={"ID":"66ed4999-426b-4615-bfb3-764a3ecc950f","Type":"ContainerStarted","Data":"2837ee081d61d6690d27de9787d553e2e97701af87910f6df2680dee8faa326c"} Oct 03 13:31:50 crc kubenswrapper[4861]: I1003 13:31:50.837760 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-multus/multus-jwgvx" event={"ID":"f714b7db-082f-4c2c-8239-ba5df6986c13","Type":"ContainerStarted","Data":"163f5cadc8f9cf8082434639e5dd0dfae5cefc359dbf462b616e4dde476a309f"} Oct 03 13:31:50 crc kubenswrapper[4861]: I1003 13:31:50.839680 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" event={"ID":"d8335d3f-417e-4114-b306-a3d8f6c31348","Type":"ContainerStarted","Data":"c79706d97ac0c9214aee8c49206bfb27e579a82781b63cf07bd7b9dc43077402"} Oct 03 13:31:50 crc kubenswrapper[4861]: I1003 13:31:50.839737 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" event={"ID":"d8335d3f-417e-4114-b306-a3d8f6c31348","Type":"ContainerStarted","Data":"871a1c47b73846e3f28db33691e75b5ed73af7287e81dae4cf2134fd827614b4"} Oct 03 13:31:50 crc kubenswrapper[4861]: I1003 13:31:50.839754 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" event={"ID":"d8335d3f-417e-4114-b306-a3d8f6c31348","Type":"ContainerStarted","Data":"5d3cf5d4e680f7e1914df1ab4164347417ba36a83d157374b06c3ae7d45e3be5"} Oct 03 13:31:50 crc kubenswrapper[4861]: I1003 13:31:50.840859 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"876e806fe7d7313a700bd557fe86fe469146eeb63ecd75684c558d04f6ef5862"} Oct 03 13:31:50 crc kubenswrapper[4861]: I1003 13:31:50.842684 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-c97s6" event={"ID":"e1217d91-8c47-4353-b363-96c9de2cdb56","Type":"ContainerStarted","Data":"70a4ac69623752a83655a58cf44ef00fbf88b0321bc83721fbbe16ea746699c9"} Oct 03 13:31:50 crc kubenswrapper[4861]: I1003 13:31:50.842721 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-c97s6" event={"ID":"e1217d91-8c47-4353-b363-96c9de2cdb56","Type":"ContainerStarted","Data":"e2cf3f83b2f06fcc6b109e8ffe076909dcbe0643e2c474aa7e808ed23b706fcf"} Oct 03 13:31:50 crc kubenswrapper[4861]: I1003 13:31:50.846015 4861 generic.go:334] "Generic (PLEG): container finished" podID="3bf3157b-44d1-4bb3-b185-71523a80c054" containerID="3b41a8d9dd0f504f505f1d49f5b7fa39d025f0bc4c49b19ef61691a41bd162b0" exitCode=0 Oct 03 13:31:50 crc kubenswrapper[4861]: I1003 13:31:50.846103 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-wm76s" event={"ID":"3bf3157b-44d1-4bb3-b185-71523a80c054","Type":"ContainerDied","Data":"3b41a8d9dd0f504f505f1d49f5b7fa39d025f0bc4c49b19ef61691a41bd162b0"} Oct 03 13:31:50 crc kubenswrapper[4861]: I1003 13:31:50.846168 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-wm76s" event={"ID":"3bf3157b-44d1-4bb3-b185-71523a80c054","Type":"ContainerStarted","Data":"80efc0291b9f7655871f0eff781c293f811e70b1876d75221cfe8bc68a03c0e2"} Oct 03 13:31:50 crc kubenswrapper[4861]: I1003 13:31:50.846632 4861 scope.go:117] "RemoveContainer" containerID="cd18d89b8b81faa6f8232c33ecaec19aafc3d9574a090cbef37bcf26f83a4bf9" Oct 03 13:31:50 crc kubenswrapper[4861]: E1003 13:31:50.846808 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Oct 03 13:31:50 crc kubenswrapper[4861]: I1003 13:31:50.861215 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:50Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:50 crc kubenswrapper[4861]: I1003 13:31:50.888059 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ceeea9a8c61928b935a6c01f2dda3f9bf0036c2c2792c9338cc580a3296285b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:50Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:50 crc kubenswrapper[4861]: I1003 13:31:50.905905 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:50Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:50 crc kubenswrapper[4861]: I1003 13:31:50.919690 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:50Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:50 crc kubenswrapper[4861]: I1003 13:31:50.948594 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66ed4999-426b-4615-bfb3-764a3ecc950f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85920cc6705b475966ceb17c3776353ec6cd31730aab339b4e5469034c49264e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://85920cc6705b475966ceb17c3776353ec6cd31730aab339b4e5469034c49264e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5twn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:50Z 
is after 2025-08-24T17:21:41Z" Oct 03 13:31:50 crc kubenswrapper[4861]: I1003 13:31:50.974071 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f330284d8d5446236d2bf739c6df75969c865e304c5adab6b1ec40a92baf30d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63b55320840755d0a9c8296d19550ae1d7f5cb2f17d286dddc10a0202963bd98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:50Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:50 crc kubenswrapper[4861]: I1003 13:31:50.996125 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jwgvx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f714b7db-082f-4c2c-8239-ba5df6986c13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b76qk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jwgvx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": 
failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:50Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:51 crc kubenswrapper[4861]: I1003 13:31:51.010826 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d8335d3f-417e-4114-b306-a3d8f6c31348\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7prvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7prvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-t9slw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: 
failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:51Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:51 crc kubenswrapper[4861]: I1003 13:31:51.030441 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:51Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:51 crc kubenswrapper[4861]: I1003 13:31:51.052516 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wm76s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3bf3157b-44d1-4bb3-b185-71523a80c054\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plu
gin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wm76s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-10-03T13:31:51Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:51 crc kubenswrapper[4861]: I1003 13:31:51.070612 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-c97s6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e1217d91-8c47-4353-b363-96c9de2cdb56\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6zdw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-c97s6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:51Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:51 crc kubenswrapper[4861]: I1003 13:31:51.087986 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"587ecce6-1ef4-4f74-a2ba-bd6e9fdb84dc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d77b51532e1ed4922634cbfc9360ac49276104c2c3ca115ea522ff423cd7bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://853fb69edcd3e4a27929ab2a6081c40f93553967619663805afb7b626f9c1e39\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75079f3e07d277ab11585e34fc72877ba93a8d0aeaa3f0c8bb214c7c14f9c1b1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd18d89b8b81faa6f8232c33ecaec19aafc3d9574a090cbef37bcf26f83a4bf9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://cd18d89b8b81faa6f8232c33ecaec19aafc3d9574a090cbef37bcf26f83a4bf9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 13:31:47.746138 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 13:31:47.746280 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 13:31:47.747035 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2119377140/tls.crt::/tmp/serving-cert-2119377140/tls.key\\\\\\\"\\\\nI1003 13:31:48.538827 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 13:31:48.544908 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 13:31:48.544935 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 13:31:48.545220 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 13:31:48.545275 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 13:31:48.555911 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1003 13:31:48.555947 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 13:31:48.555953 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 13:31:48.555961 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 13:31:48.555964 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 13:31:48.555968 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 13:31:48.555971 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1003 13:31:48.556259 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1003 13:31:48.559989 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb1af1cb5b66706cd0a0da5a3f6b2c380a771100e61f84ca2c85c28f1878f7f6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bdf52752b6cec17ac3d33b9c94d08302ff9e7ff88ab9c23590134a36b1384af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bdf52752b6cec17ac3d33b9c94d08302ff9e7ff88ab9c23590134a36b1384af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:51Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:51 crc kubenswrapper[4861]: I1003 13:31:51.103046 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:51Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:51 crc kubenswrapper[4861]: I1003 13:31:51.123873 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f330284d8d5446236d2bf739c6df75969c865e304c5adab6b1ec40a92baf30d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63b55320840755d0a9c8296d19550ae1d7f5cb2f17d286dddc10a0202963bd98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imag
eID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:51Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:51 crc kubenswrapper[4861]: I1003 13:31:51.142512 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jwgvx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f714b7db-082f-4c2c-8239-ba5df6986c13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://163f5cadc8f9cf8082434639e5dd0dfae5cefc359dbf462b616e4dde476a309f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\"
:\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b76qk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jwgvx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:51Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:51 crc kubenswrapper[4861]: I1003 13:31:51.155207 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d8335d3f-417e-4114-b306-a3d8f6c31348\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c79706d97ac0c9214aee8c49206bfb27e579a82781b63cf07bd7b9dc43077402\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7prvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://871a1c47b73846e3f28db33691e75b5ed73af7287e81dae4cf2134fd827614b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7prvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-t9slw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:51Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:51 crc kubenswrapper[4861]: I1003 13:31:51.170047 4861 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wm76s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3bf3157b-44d1-4bb3-b185-71523a80c054\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b41a8d9dd0f504f505f1d49f5b7fa39d025f0bc4c49b19ef61691a41bd162b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b41a8d9dd0f504f505f1d49f5b7fa39d025f0bc4c49b19ef61691a41bd162b0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\
\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-ap
i-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wm76s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:51Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:51 crc kubenswrapper[4861]: I1003 13:31:51.181452 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-c97s6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e1217d91-8c47-4353-b363-96c9de2cdb56\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70a4ac69623752a83655a58cf44ef00fbf88b0321bc83721fbbe16ea746699c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6zdw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":
\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-c97s6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:51Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:51 crc kubenswrapper[4861]: I1003 13:31:51.197419 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"587ecce6-1ef4-4f74-a2ba-bd6e9fdb84dc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d77b51532e1ed4922634cbfc9360ac49276104c2c3ca115ea522ff423cd7bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://853fb69edcd3e4a27929ab2a6081c40f93553967619663805afb7b626f9c1e39\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75079f3e07d277ab11585e34fc72877ba93a8d0aeaa3f0c8bb214c7c14f9c1b1\\\",\\\"im
age\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd18d89b8b81faa6f8232c33ecaec19aafc3d9574a090cbef37bcf26f83a4bf9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cd18d89b8b81faa6f8232c33ecaec19aafc3d9574a090cbef37bcf26f83a4bf9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 13:31:47.746138 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 13:31:47.746280 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 13:31:47.747035 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2119377140/tls.crt::/tmp/serving-cert-2119377140/tls.key\\\\\\\"\\\\nI1003 13:31:48.538827 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 13:31:48.544908 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 13:31:48.544935 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 13:31:48.545220 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 13:31:48.545275 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 13:31:48.555911 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1003 13:31:48.555947 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 13:31:48.555953 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 13:31:48.555961 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 13:31:48.555964 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 13:31:48.555968 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 13:31:48.555971 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1003 13:31:48.556259 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1003 13:31:48.559989 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb1af1cb5b66706cd0a0da5a3f6b2c380a771100e61f84ca2c85c28f1878f7f6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bdf52752b6cec17ac3d33b9c94d08302ff9e7ff88ab9c23590134a36b1384af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bdf52752b6cec17ac3d33b9c94d08302ff9e7ff88ab9c23590134a36b1384af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:51Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:51 crc kubenswrapper[4861]: I1003 13:31:51.215004 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://876e806fe7d7313a700bd557fe86fe469146eeb63ecd75684c558d04f6ef5862\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:51Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:51 crc kubenswrapper[4861]: I1003 13:31:51.246510 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66ed4999-426b-4615-bfb3-764a3ecc950f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85920cc6705b475966ceb17c3776353ec6cd31730aab339b4e5469034c49264e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://85920cc6705b475966ceb17c3776353ec6cd31730aab339b4e5469034c49264e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5twn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:51Z 
is after 2025-08-24T17:21:41Z" Oct 03 13:31:51 crc kubenswrapper[4861]: I1003 13:31:51.269963 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:51Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:51 crc kubenswrapper[4861]: I1003 13:31:51.288731 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ceeea9a8c61928b935a6c01f2dda3f9bf0036c2c2792c9338cc580a3296285b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:51Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:51 crc kubenswrapper[4861]: I1003 13:31:51.303332 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:51Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:51 crc kubenswrapper[4861]: I1003 13:31:51.401493 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 13:31:51 crc kubenswrapper[4861]: I1003 13:31:51.401598 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 13:31:51 crc kubenswrapper[4861]: I1003 13:31:51.401629 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 13:31:51 crc kubenswrapper[4861]: I1003 13:31:51.401647 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 13:31:51 crc kubenswrapper[4861]: I1003 13:31:51.401663 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 13:31:51 crc kubenswrapper[4861]: E1003 13:31:51.401711 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-10-03 13:31:55.401677948 +0000 UTC m=+29.399663005 (durationBeforeRetry 4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:31:51 crc kubenswrapper[4861]: E1003 13:31:51.401760 4861 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 03 13:31:51 crc kubenswrapper[4861]: E1003 13:31:51.401797 4861 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 03 13:31:51 crc kubenswrapper[4861]: E1003 13:31:51.401812 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-03 13:31:55.401796651 +0000 UTC m=+29.399781708 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 03 13:31:51 crc kubenswrapper[4861]: E1003 13:31:51.401880 4861 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 03 13:31:51 crc kubenswrapper[4861]: E1003 13:31:51.401877 4861 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 03 13:31:51 crc kubenswrapper[4861]: E1003 13:31:51.401901 4861 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 03 13:31:51 crc kubenswrapper[4861]: E1003 13:31:51.401921 4861 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 03 13:31:51 crc kubenswrapper[4861]: E1003 13:31:51.401928 4861 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 13:31:51 crc kubenswrapper[4861]: E1003 13:31:51.401938 4861 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 13:31:51 crc kubenswrapper[4861]: E1003 13:31:51.401897 4861 nestedpendingoperations.go:348] Operation 
for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-03 13:31:55.401878693 +0000 UTC m=+29.399863740 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 03 13:31:51 crc kubenswrapper[4861]: E1003 13:31:51.401998 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-03 13:31:55.401988185 +0000 UTC m=+29.399973352 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 13:31:51 crc kubenswrapper[4861]: E1003 13:31:51.402015 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-03 13:31:55.402006786 +0000 UTC m=+29.399991973 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 13:31:51 crc kubenswrapper[4861]: I1003 13:31:51.583320 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 03 13:31:51 crc kubenswrapper[4861]: I1003 13:31:51.587455 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 03 13:31:51 crc kubenswrapper[4861]: I1003 13:31:51.591445 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc"] Oct 03 13:31:51 crc kubenswrapper[4861]: I1003 13:31:51.599147 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wm76s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3bf3157b-44d1-4bb3-b185-71523a80c054\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b41a8d9dd0f504f505f1d49f5b7fa39d025f0bc4c49b19ef61691a41bd162b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b41a8d9dd0f504f505f1d49f5b7fa39d025f0bc4c49b19ef61691a41bd162b0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reaso
n\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wm76s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:51Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:51 crc 
kubenswrapper[4861]: I1003 13:31:51.611604 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-c97s6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e1217d91-8c47-4353-b363-96c9de2cdb56\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70a4ac69623752a83655a58cf44ef00fbf88b0321bc83721fbbe16ea746699c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6zdw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-c97s6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:51Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:51 crc kubenswrapper[4861]: I1003 13:31:51.625336 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"587ecce6-1ef4-4f74-a2ba-bd6e9fdb84dc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d77b51532e1ed4922634cbfc9360ac49276104c2c3ca115ea522ff423cd7bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://853fb69edcd3e4a27929ab2a6081c40f93553967619663805afb7b626f9c1e39\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75079f3e07d277ab11585e34fc72877ba93a8d0aeaa3f0c8bb214c7c14f9c1b1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd18d89b8b81faa6f8232c33ecaec19aafc3d9574a090cbef37bcf26f83a4bf9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://cd18d89b8b81faa6f8232c33ecaec19aafc3d9574a090cbef37bcf26f83a4bf9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 13:31:47.746138 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 13:31:47.746280 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 13:31:47.747035 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2119377140/tls.crt::/tmp/serving-cert-2119377140/tls.key\\\\\\\"\\\\nI1003 13:31:48.538827 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 13:31:48.544908 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 13:31:48.544935 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 13:31:48.545220 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 13:31:48.545275 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 13:31:48.555911 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1003 13:31:48.555947 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 13:31:48.555953 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 13:31:48.555961 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 13:31:48.555964 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 13:31:48.555968 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 13:31:48.555971 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1003 13:31:48.556259 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1003 13:31:48.559989 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb1af1cb5b66706cd0a0da5a3f6b2c380a771100e61f84ca2c85c28f1878f7f6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bdf52752b6cec17ac3d33b9c94d08302ff9e7ff88ab9c23590134a36b1384af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bdf52752b6cec17ac3d33b9c94d08302ff9e7ff88ab9c23590134a36b1384af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:51Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:51 crc kubenswrapper[4861]: I1003 13:31:51.642920 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:51Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:51 crc kubenswrapper[4861]: I1003 13:31:51.657169 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ceeea9a8c61928b935a6c01f2dda3f9bf0036c2c2792c9338cc580a3296285b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:51Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:51 crc kubenswrapper[4861]: I1003 13:31:51.671089 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:51Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:51 crc kubenswrapper[4861]: I1003 13:31:51.680609 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 13:31:51 crc kubenswrapper[4861]: I1003 13:31:51.680631 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 13:31:51 crc kubenswrapper[4861]: E1003 13:31:51.680731 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 13:31:51 crc kubenswrapper[4861]: I1003 13:31:51.680750 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 13:31:51 crc kubenswrapper[4861]: E1003 13:31:51.680860 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 13:31:51 crc kubenswrapper[4861]: E1003 13:31:51.680962 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 13:31:51 crc kubenswrapper[4861]: I1003 13:31:51.687469 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://876e806fe7d7313a700bd557fe86fe469146eeb63ecd75684c558d04f6ef5862\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:51Z 
is after 2025-08-24T17:21:41Z" Oct 03 13:31:51 crc kubenswrapper[4861]: I1003 13:31:51.706214 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66ed4999-426b-4615-bfb3-764a3ecc950f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\
\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":tru
e,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85920cc6705b475966ceb17c3776353ec6cd31730aab339b4e546903
4c49264e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://85920cc6705b475966ceb17c3776353ec6cd31730aab339b4e5469034c49264e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5twn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:51Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:51 crc kubenswrapper[4861]: I1003 13:31:51.718329 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:51Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:51 crc kubenswrapper[4861]: I1003 13:31:51.732057 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f330284d8d5446236d2bf739c6df75969c865e304c5adab6b1ec40a92baf30d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63b55320840755d0a9c8296d19550ae1d7f5cb2f17d286dddc10a0202963bd98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:51Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:51 crc kubenswrapper[4861]: I1003 13:31:51.744862 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jwgvx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f714b7db-082f-4c2c-8239-ba5df6986c13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://163f5cadc8f9cf8082434639e5dd0dfae5cefc359dbf462b616e4dde476a309f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc
/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b76qk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jwgvx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:51Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:51 crc kubenswrapper[4861]: I1003 13:31:51.755924 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d8335d3f-417e-4114-b306-a3d8f6c31348\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c79706d97ac0c9214aee8c49206bfb27e579a82781b63cf07bd7b9dc43077402\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7prvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://871a1c47b73846e3f28db33691e75b5ed73af7287e81dae4cf2134fd827614b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-de
v/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7prvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-t9slw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:51Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:51 crc kubenswrapper[4861]: I1003 13:31:51.769839 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wm76s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3bf3157b-44d1-4bb3-b185-71523a80c054\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b41a8d9dd0f504f505f1d49f5b7fa39d025f0bc4c49b19ef61691a41bd162b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b41a8d9dd0f504f505f1d49f5b7fa39d025f0bc4c49b19ef61691a41bd162b0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reaso
n\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wm76s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:51Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:51 crc 
kubenswrapper[4861]: I1003 13:31:51.780261 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-c97s6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e1217d91-8c47-4353-b363-96c9de2cdb56\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70a4ac69623752a83655a58cf44ef00fbf88b0321bc83721fbbe16ea746699c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6zdw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-c97s6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:51Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:51 crc kubenswrapper[4861]: I1003 13:31:51.795320 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"587ecce6-1ef4-4f74-a2ba-bd6e9fdb84dc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d77b51532e1ed4922634cbfc9360ac49276104c2c3ca115ea522ff423cd7bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://853fb69edcd3e4a27929ab2a6081c40f93553967619663805afb7b626f9c1e39\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75079f3e07d277ab11585e34fc72877ba93a8d0aeaa3f0c8bb214c7c14f9c1b1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd18d89b8b81faa6f8232c33ecaec19aafc3d9574a090cbef37bcf26f83a4bf9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://cd18d89b8b81faa6f8232c33ecaec19aafc3d9574a090cbef37bcf26f83a4bf9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 13:31:47.746138 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 13:31:47.746280 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 13:31:47.747035 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2119377140/tls.crt::/tmp/serving-cert-2119377140/tls.key\\\\\\\"\\\\nI1003 13:31:48.538827 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 13:31:48.544908 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 13:31:48.544935 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 13:31:48.545220 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 13:31:48.545275 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 13:31:48.555911 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1003 13:31:48.555947 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 13:31:48.555953 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 13:31:48.555961 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 13:31:48.555964 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 13:31:48.555968 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 13:31:48.555971 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1003 13:31:48.556259 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1003 13:31:48.559989 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb1af1cb5b66706cd0a0da5a3f6b2c380a771100e61f84ca2c85c28f1878f7f6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bdf52752b6cec17ac3d33b9c94d08302ff9e7ff88ab9c23590134a36b1384af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bdf52752b6cec17ac3d33b9c94d08302ff9e7ff88ab9c23590134a36b1384af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:51Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:51 crc kubenswrapper[4861]: I1003 13:31:51.809174 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6d0e9cdc-883e-4b67-afb2-2ef5f4b3246d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01eb05b088e421c220145fd833922351aeba4a520944c6b707039785e26ef303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb9bcc84067a58db80e3c7e1b23825baeaff91f97351e9ada3765b6589fda35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80131782150ecbeb45ec2f55e86909b3735ec4f0b09e27e31f6dfc24d6d4ccd7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a40838be9fe69f9bebecff82c9f10b4c00e167b7f927682e6b18ff490bd10ad4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:51Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:51 crc kubenswrapper[4861]: I1003 13:31:51.822218 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:51Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:51 crc kubenswrapper[4861]: I1003 13:31:51.841785 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ceeea9a8c61928b935a6c01f2dda3f9bf0036c2c2792c9338cc580a3296285b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:51Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:51 crc kubenswrapper[4861]: I1003 13:31:51.852974 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" 
event={"ID":"66ed4999-426b-4615-bfb3-764a3ecc950f","Type":"ContainerStarted","Data":"3dadd381cc9fb8f216611723c7f3113272fdd37e424ab087ae2b516b1282c724"} Oct 03 13:31:51 crc kubenswrapper[4861]: I1003 13:31:51.853034 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" event={"ID":"66ed4999-426b-4615-bfb3-764a3ecc950f","Type":"ContainerStarted","Data":"5c11d2168c2a8a146f93a9048c50a0a7da936f36039a924e2e0c946f571ac6d2"} Oct 03 13:31:51 crc kubenswrapper[4861]: I1003 13:31:51.853047 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" event={"ID":"66ed4999-426b-4615-bfb3-764a3ecc950f","Type":"ContainerStarted","Data":"1d7403a686a403bd13b8c7040a8d54e47ea882e532dbde51ff960cf2b4a7dc84"} Oct 03 13:31:51 crc kubenswrapper[4861]: I1003 13:31:51.853057 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" event={"ID":"66ed4999-426b-4615-bfb3-764a3ecc950f","Type":"ContainerStarted","Data":"a6dd78f8d0e99d19d90df2672bd0a66e48195ab147e3821b110c5b9b13fff935"} Oct 03 13:31:51 crc kubenswrapper[4861]: I1003 13:31:51.853065 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" event={"ID":"66ed4999-426b-4615-bfb3-764a3ecc950f","Type":"ContainerStarted","Data":"17d9e0e142062e93034c5f825e1229664112d38443d5843713cac6e077737c48"} Oct 03 13:31:51 crc kubenswrapper[4861]: I1003 13:31:51.853074 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" event={"ID":"66ed4999-426b-4615-bfb3-764a3ecc950f","Type":"ContainerStarted","Data":"a49ed8048fd561e10cc87dfa9b39d3ff2123f2cc65f9b4402bba6bf01d161213"} Oct 03 13:31:51 crc kubenswrapper[4861]: I1003 13:31:51.854970 4861 generic.go:334] "Generic (PLEG): container finished" podID="3bf3157b-44d1-4bb3-b185-71523a80c054" containerID="ad87b94fe313e12f4f857a69a0a01f6f34877ded4103f2de3015b588522d9904" exitCode=0 Oct 03 13:31:51 crc kubenswrapper[4861]: I1003 13:31:51.855202 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-wm76s" event={"ID":"3bf3157b-44d1-4bb3-b185-71523a80c054","Type":"ContainerDied","Data":"ad87b94fe313e12f4f857a69a0a01f6f34877ded4103f2de3015b588522d9904"} Oct 03 13:31:51 crc kubenswrapper[4861]: I1003 13:31:51.860003 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:51Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:51 crc kubenswrapper[4861]: I1003 13:31:51.875815 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://876e806fe7d7313a700bd557fe86fe469146eeb63ecd75684c558d04f6ef5862\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:51Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:51 crc kubenswrapper[4861]: I1003 13:31:51.899183 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66ed4999-426b-4615-bfb3-764a3ecc950f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"
}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrid
es\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\
\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85920cc6705b475966ceb17c3776353ec6cd31730aab339b4e5469034c49264e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://85920cc6705b475966ceb17c3776353ec6cd31730aab339b4e5469034c49264e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5twn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:51Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:51 crc kubenswrapper[4861]: I1003 13:31:51.917157 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:51Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:51 crc kubenswrapper[4861]: I1003 13:31:51.932337 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f330284d8d5446236d2bf739c6df75969c865e304c5adab6b1ec40a92baf30d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63b55320840755d0a9c8296d19550ae1d7f5cb2f17d286dddc10a0202963bd98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:51Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:51 crc kubenswrapper[4861]: I1003 13:31:51.950837 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jwgvx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f714b7db-082f-4c2c-8239-ba5df6986c13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://163f5cadc8f9cf8082434639e5dd0dfae5cefc359dbf462b616e4dde476a309f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc
/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b76qk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jwgvx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:51Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:51 crc kubenswrapper[4861]: I1003 13:31:51.962676 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d8335d3f-417e-4114-b306-a3d8f6c31348\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c79706d97ac0c9214aee8c49206bfb27e579a82781b63cf07bd7b9dc43077402\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7prvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://871a1c47b73846e3f28db33691e75b5ed73af7287e81dae4cf2134fd827614b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-de
v/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7prvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-t9slw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:51Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:51 crc kubenswrapper[4861]: I1003 13:31:51.980273 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wm76s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3bf3157b-44d1-4bb3-b185-71523a80c054\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b41a8d9dd0f504f505f1d49f5b7fa39d025f0bc4c49b19ef61691a41bd162b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b41a8d9dd0f504f505f1d49f5b7fa39d025f0bc4c49b19ef61691a41bd162b0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad87b94fe313e12f4f857a69a0a01f6f34877ded4103f2de3015b588522d9904\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad87b94fe313e12f4f857a69a0a01f6f34877ded4103f2de3015b588522d9904\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-
03T13:31:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wm76s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:51Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:51 crc kubenswrapper[4861]: I1003 13:31:51.991825 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-c97s6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e1217d91-8c47-4353-b363-96c9de2cdb56\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70a4ac69623752a83655a58cf44ef00fbf88b0321bc83721fbbe16ea746699c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6zdw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-c97s6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:51Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:52 crc kubenswrapper[4861]: I1003 13:31:52.006009 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6d0e9cdc-883e-4b67-afb2-2ef5f4b3246d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01eb05b088e421c220145fd833922351aeba4a520944c6b707039785e26ef303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb9bcc84067a58db80e3c7e1b23825baeaff91f97351e9ada3765b6589fda35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80131782150ecbeb45ec2f55e86909b3735ec4f0b09e27e31f6dfc24d6d4ccd7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a40838be9fe69f9bebecff82c9f10b4c00e167b7f927682e6b18ff490bd10ad4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:52Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:52 crc kubenswrapper[4861]: I1003 13:31:52.018252 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"587ecce6-1ef4-4f74-a2ba-bd6e9fdb84dc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d77b51532e1ed4922634cbfc9360ac49276104c2c3ca115ea522ff423cd7bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://853fb69edcd3e4a27929ab2a6081c40f93553967619663805afb7b626f9c1e39\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75079f3e07d277ab11585e34fc72877ba93a8d0aeaa3f0c8bb214c7c14f9c1b1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd18d89b8b81faa6f8232c33ecaec19aafc3d9574a090cbef37bcf26f83a4bf9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cd18d89b8b81faa6f8232c33ecaec19aafc3d9574a090cbef37bcf26f83a4bf9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 13:31:47.746138 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 13:31:47.746280 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 13:31:47.747035 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2119377140/tls.crt::/tmp/serving-cert-2119377140/tls.key\\\\\\\"\\\\nI1003 13:31:48.538827 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 13:31:48.544908 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 13:31:48.544935 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 13:31:48.545220 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 13:31:48.545275 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 13:31:48.555911 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1003 13:31:48.555947 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 13:31:48.555953 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 13:31:48.555961 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 13:31:48.555964 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 13:31:48.555968 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 13:31:48.555971 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1003 13:31:48.556259 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1003 13:31:48.559989 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb1af1cb5b66706cd0a0da5a3f6b2c380a771100e61f84ca2c85c28f1878f7f6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bdf52752b6cec17ac3d33b9c94d08302ff9e7ff88ab9c23590134a36b1384af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bdf52752b6cec17ac3d33b9c94d08302ff9e7ff88ab9c23590134a36b1384af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:52Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:52 crc kubenswrapper[4861]: I1003 13:31:52.030494 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:52Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:52 crc kubenswrapper[4861]: I1003 13:31:52.043400 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ceeea9a8c61928b935a6c01f2dda3f9bf0036c2c2792c9338cc580a3296285b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:52Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:52 crc kubenswrapper[4861]: I1003 13:31:52.080031 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:52Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:52 crc kubenswrapper[4861]: I1003 13:31:52.100664 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://876e806fe7d7313a700bd557fe86fe469146eeb63ecd75684c558d04f6ef5862\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:52Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:52 crc kubenswrapper[4861]: I1003 13:31:52.126975 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66ed4999-426b-4615-bfb3-764a3ecc950f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85920cc6705b475966ceb17c3776353ec6cd31730aab339b4e5469034c49264e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://85920cc6705b475966ceb17c3776353ec6cd31730aab339b4e5469034c49264e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5twn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:52Z 
is after 2025-08-24T17:21:41Z" Oct 03 13:31:52 crc kubenswrapper[4861]: I1003 13:31:52.142214 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f330284d8d5446236d2bf739c6df75969c865e304c5adab6b1ec40a92baf30d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63b55320840755d0a9c8296d19550ae1d7f5cb2f17d286dddc10a0202963bd98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:52Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:52 crc kubenswrapper[4861]: I1003 13:31:52.156221 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jwgvx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f714b7db-082f-4c2c-8239-ba5df6986c13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://163f5cadc8f9cf8082434639e5dd0dfae5cefc359dbf462b616e4dde476a309f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b76qk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jwgvx\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:52Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:52 crc kubenswrapper[4861]: I1003 13:31:52.168461 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d8335d3f-417e-4114-b306-a3d8f6c31348\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c79706d97ac0c9214aee8c49206bfb27e579a82781b63cf07bd7b9dc43077402\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7prvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://871a1c47b73846e3f28db33691e75b5ed73af7287e81dae4cf2134fd827614b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7prvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-t9slw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:52Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:52 crc kubenswrapper[4861]: I1003 13:31:52.179955 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:52Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:52 crc kubenswrapper[4861]: I1003 13:31:52.860667 4861 generic.go:334] "Generic (PLEG): container finished" podID="3bf3157b-44d1-4bb3-b185-71523a80c054" containerID="4343077b8251a65be24e9269fa38bd48d628581ef65b43f7fa262286df7b677d" exitCode=0 Oct 03 13:31:52 crc kubenswrapper[4861]: I1003 13:31:52.860705 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-wm76s" event={"ID":"3bf3157b-44d1-4bb3-b185-71523a80c054","Type":"ContainerDied","Data":"4343077b8251a65be24e9269fa38bd48d628581ef65b43f7fa262286df7b677d"} Oct 03 13:31:52 crc kubenswrapper[4861]: I1003 13:31:52.874211 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"587ecce6-1ef4-4f74-a2ba-bd6e9fdb84dc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d77b51532e1ed4922634cbfc9360ac49276104c2c3ca115ea522ff423cd7bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://853fb69edcd3e4a27929ab2a6081c40f93553967619663805afb7b626f9c1e39\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75079f3e07d277ab11585e34fc72877ba93a8d0aeaa3f0c8bb214c7c14f9c1b1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd18d89b8b81faa6f8232c33ecaec19aafc3d9574a090cbef37bcf26f83a4bf9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cd18d89b8b81faa6f8232c33ecaec19aafc3d9574a090cbef37bcf26f83a4bf9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 13:31:47.746138 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 13:31:47.746280 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 13:31:47.747035 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2119377140/tls.crt::/tmp/serving-cert-2119377140/tls.key\\\\\\\"\\\\nI1003 13:31:48.538827 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 13:31:48.544908 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 13:31:48.544935 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 13:31:48.545220 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 13:31:48.545275 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 13:31:48.555911 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1003 13:31:48.555947 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 13:31:48.555953 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 13:31:48.555961 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 13:31:48.555964 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 13:31:48.555968 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 13:31:48.555971 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1003 13:31:48.556259 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1003 13:31:48.559989 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb1af1cb5b66706cd0a0da5a3f6b2c380a771100e61f84ca2c85c28f1878f7f6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bdf52752b6cec17ac3d33b9c94d08302ff9e7ff88ab9c23590134a36b1384af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bdf52752b6cec17ac3d33b9c94d08302ff9e7ff88ab9c23590134a36b1384af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:52Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:52 crc kubenswrapper[4861]: I1003 13:31:52.887440 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6d0e9cdc-883e-4b67-afb2-2ef5f4b3246d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01eb05b088e421c220145fd833922351aeba4a520944c6b707039785e26ef303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb9bcc84067a58db80e3c7e1b23825baeaff91f97351e9ada3765b6589fda35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80131782150ecbeb45ec2f55e86909b3735ec4f0b09e27e31f6dfc24d6d4ccd7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a40838be9fe69f9bebecff82c9f10b4c00e167b7f927682e6b18ff490bd10ad4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:52Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:52 crc kubenswrapper[4861]: I1003 13:31:52.905540 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66ed4999-426b-4615-bfb3-764a3ecc950f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85920cc6705b475966ceb17c3776353ec6cd31730aab339b4e5469034c49264e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://85920cc6705b475966ceb17c3776353ec6cd31730aab339b4e5469034c49264e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5twn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:52Z 
is after 2025-08-24T17:21:41Z" Oct 03 13:31:52 crc kubenswrapper[4861]: I1003 13:31:52.920646 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:52Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:52 crc kubenswrapper[4861]: I1003 13:31:52.934436 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ceeea9a8c61928b935a6c01f2dda3f9bf0036c2c2792c9338cc580a3296285b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:52Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:52 crc kubenswrapper[4861]: I1003 13:31:52.946813 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:52Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:52 crc kubenswrapper[4861]: I1003 13:31:52.961023 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://876e806fe7d7313a700bd557fe86fe469146eeb63ecd75684c558d04f6ef5862\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:52Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:52 crc kubenswrapper[4861]: I1003 13:31:52.976994 4861 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:52Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:52 crc kubenswrapper[4861]: I1003 13:31:52.989432 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f330284d8d5446236d2bf739c6df75969c865e304c5adab6b1ec40a92baf30d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63b55320840755d0a9c8296d19550ae1d7f5cb2f17d286dddc10a0202963bd98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:52Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:53 crc kubenswrapper[4861]: I1003 13:31:53.000915 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jwgvx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f714b7db-082f-4c2c-8239-ba5df6986c13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://163f5cadc8f9cf8082434639e5dd0dfae5cefc359dbf462b616e4dde476a309f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b76qk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jwgvx\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:52Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:53 crc kubenswrapper[4861]: I1003 13:31:53.015323 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d8335d3f-417e-4114-b306-a3d8f6c31348\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c79706d97ac0c9214aee8c49206bfb27e579a82781b63cf07bd7b9dc43077402\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7prvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://871a1c47b73846e3f28db33691e75b5ed73af7287e81dae4cf2134fd827614b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7prvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-t9slw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:53Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:53 crc kubenswrapper[4861]: I1003 13:31:53.029434 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wm76s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3bf3157b-44d1-4bb3-b185-71523a80c054\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b41a8d9dd0f504f505f1d49f5b7fa39d025f0bc4c49b19ef61691a41bd162b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b41a8d9dd0f504f505f1d49f5b7fa39d025f0bc4c49b19ef61691a41bd162b0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad87b94fe313e12f4f857a69a0a01f6f34877ded4103f2de3015b588522d9904\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad87b94fe313e12f4f857a69a0a01f6f34877ded4103f2de3015b588522d9904\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4343077b8251a65be24e9269fa38bd48d628581ef65b43f7fa262286df7b677d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4343077b8251a65be24e9269fa38bd48d628581ef65b43f7fa262286df7b677d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/
cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wm76s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:53Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:53 crc kubenswrapper[4861]: I1003 13:31:53.041584 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-c97s6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e1217d91-8c47-4353-b363-96c9de2cdb56\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70a4ac69623752a83655a58cf44ef00fbf88b0321bc83721fbbe16ea746699c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6zdw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-c97s6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:53Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:53 crc kubenswrapper[4861]: I1003 
13:31:53.680750 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 13:31:53 crc kubenswrapper[4861]: E1003 13:31:53.680884 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 13:31:53 crc kubenswrapper[4861]: I1003 13:31:53.681163 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 13:31:53 crc kubenswrapper[4861]: I1003 13:31:53.681217 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 13:31:53 crc kubenswrapper[4861]: E1003 13:31:53.681275 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 13:31:53 crc kubenswrapper[4861]: E1003 13:31:53.681379 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 13:31:53 crc kubenswrapper[4861]: I1003 13:31:53.866864 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" event={"ID":"66ed4999-426b-4615-bfb3-764a3ecc950f","Type":"ContainerStarted","Data":"f2ee45e3f91ddde95e7bdf26aed6afb1d69eb3dbbdad136c66a51a2a3a325984"} Oct 03 13:31:53 crc kubenswrapper[4861]: I1003 13:31:53.869464 4861 generic.go:334] "Generic (PLEG): container finished" podID="3bf3157b-44d1-4bb3-b185-71523a80c054" containerID="d10915a62560e9a9e386d4029ccd1c6cd9c99209ab32653437c32a6e5cf7e98d" exitCode=0 Oct 03 13:31:53 crc kubenswrapper[4861]: I1003 13:31:53.869515 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-wm76s" event={"ID":"3bf3157b-44d1-4bb3-b185-71523a80c054","Type":"ContainerDied","Data":"d10915a62560e9a9e386d4029ccd1c6cd9c99209ab32653437c32a6e5cf7e98d"} Oct 03 13:31:53 crc kubenswrapper[4861]: I1003 13:31:53.870237 4861 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 13:31:53 crc kubenswrapper[4861]: I1003 13:31:53.871633 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:31:53 crc kubenswrapper[4861]: I1003 13:31:53.871663 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:31:53 crc kubenswrapper[4861]: I1003 13:31:53.871673 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:31:53 crc kubenswrapper[4861]: I1003 13:31:53.871741 4861 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 03 13:31:53 crc kubenswrapper[4861]: I1003 13:31:53.879297 4861 kubelet_node_status.go:115] "Node was previously registered" node="crc" Oct 03 13:31:53 crc kubenswrapper[4861]: I1003 13:31:53.879546 4861 kubelet_node_status.go:79] "Successfully registered node" node="crc" Oct 03 13:31:53 crc kubenswrapper[4861]: I1003 13:31:53.880436 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:31:53 crc kubenswrapper[4861]: I1003 13:31:53.880457 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:31:53 crc kubenswrapper[4861]: I1003 13:31:53.880465 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:31:53 crc kubenswrapper[4861]: I1003 13:31:53.880478 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:31:53 crc kubenswrapper[4861]: I1003 13:31:53.880488 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:31:53Z","lastTransitionTime":"2025-10-03T13:31:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:31:53 crc kubenswrapper[4861]: I1003 13:31:53.893943 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"587ecce6-1ef4-4f74-a2ba-bd6e9fdb84dc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d77b51532e1ed4922634cbfc9360ac49276104c2c3ca115ea522ff423cd7bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://853fb69edcd3e4a27929ab2a6081c40f93553967619663805afb7b626f9c1e39\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75079f3e07d277ab11585e34fc72877ba93a8d0aeaa3f0c8bb214c7c14f9c1b1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd18d89b8b81faa6f8232c33ecaec19aafc3d9574a090cbef37bcf26f83a4bf9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cd18d89b8b81faa6f8232c33ecaec19aafc3d9574a090cbef37bcf26f83a4bf9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 13:31:47.746138 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 13:31:47.746280 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 13:31:47.747035 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2119377140/tls.crt::/tmp/serving-cert-2119377140/tls.key\\\\\\\"\\\\nI1003 13:31:48.538827 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 13:31:48.544908 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 13:31:48.544935 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 13:31:48.545220 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 13:31:48.545275 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 13:31:48.555911 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1003 13:31:48.555947 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 13:31:48.555953 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 13:31:48.555961 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 13:31:48.555964 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 13:31:48.555968 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 13:31:48.555971 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1003 13:31:48.556259 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1003 13:31:48.559989 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb1af1cb5b66706cd0a0da5a3f6b2c380a771100e61f84ca2c85c28f1878f7f6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bdf52752b6cec17ac3d33b9c94d08302ff9e7ff88ab9c23590134a36b1384af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bdf52752b6cec17ac3d33b9c94d08302ff9e7ff88ab9c23590134a36b1384af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:53Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:53 crc kubenswrapper[4861]: E1003 13:31:53.904836 4861 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:31:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory 
available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:31:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:53Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:31:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:31:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:53Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\
"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":45063
7738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9c733c76-1447-4e10-91a4-f1aaa7de6132\\\",\\\"systemUUID\\\":\\\"5c5136c5-33d2-4bef-9fd7-5251914e4451\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:53Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:53 crc kubenswrapper[4861]: I1003 13:31:53.908632 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:31:53 crc kubenswrapper[4861]: I1003 13:31:53.908668 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:31:53 crc kubenswrapper[4861]: I1003 13:31:53.908710 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:31:53 crc kubenswrapper[4861]: I1003 13:31:53.908727 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:31:53 crc kubenswrapper[4861]: I1003 13:31:53.908737 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:31:53Z","lastTransitionTime":"2025-10-03T13:31:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:31:53 crc kubenswrapper[4861]: I1003 13:31:53.949093 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6d0e9cdc-883e-4b67-afb2-2ef5f4b3246d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01eb05b088e421c220145fd833922351aeba4a520944c6b707039785e26ef303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb9bcc84067a58db80e3c7e1b23825baeaff91f97351e9ada3765b6589fda35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80131782150ecbeb45ec2f55e86909b3735ec4f0b09e27e31f6dfc24d6d4ccd7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a40838be9fe69f9bebecff82c9f10b4c00e167b7f927682e6b18ff490bd10ad4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:53Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:53 crc kubenswrapper[4861]: I1003 13:31:53.974894 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:53Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:54 crc kubenswrapper[4861]: E1003 13:31:54.003897 4861 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:31:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:31:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:53Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:31:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:31:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:53Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9c733c76-1447-4e10-91a4-f1aaa7de6132\\\",\\\"systemUUID\\\":\\\"5c5136c5-33d2-4bef-9fd7-5251914e4451\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:53Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:54 crc kubenswrapper[4861]: I1003 13:31:54.010957 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:31:54 crc kubenswrapper[4861]: I1003 13:31:54.010995 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 03 13:31:54 crc kubenswrapper[4861]: I1003 13:31:54.011007 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:31:54 crc kubenswrapper[4861]: I1003 13:31:54.011022 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:31:54 crc kubenswrapper[4861]: I1003 13:31:54.011034 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:31:54Z","lastTransitionTime":"2025-10-03T13:31:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:31:54 crc kubenswrapper[4861]: I1003 13:31:54.015242 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ceeea9a8c61928b935a6c01f2dda3f9bf0036c2c2792c9338cc580a3296285b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:54Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:54 crc kubenswrapper[4861]: E1003 13:31:54.026410 4861 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status 
\"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:31:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:31:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:54Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:31:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:31:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:54Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae66
9\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-rel
ease-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9c733c76-1447-4e10-91a4-f1aaa7de6132\\\",\\\"systemUUID\\\":\\\"5c5136c5-33d2-4bef-9fd7-5251914e4451\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:54Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:54 crc kubenswrapper[4861]: I1003 13:31:54.037568 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:31:54 crc kubenswrapper[4861]: I1003 13:31:54.037619 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:31:54 crc kubenswrapper[4861]: I1003 13:31:54.037627 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:31:54 crc kubenswrapper[4861]: I1003 13:31:54.037641 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:31:54 crc kubenswrapper[4861]: I1003 13:31:54.037651 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:31:54Z","lastTransitionTime":"2025-10-03T13:31:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:31:54 crc kubenswrapper[4861]: I1003 13:31:54.037668 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:54Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:54 crc kubenswrapper[4861]: E1003 13:31:54.052973 4861 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:31:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory 
available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:31:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:54Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:31:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:31:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:54Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\
"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":45063
7738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9c733c76-1447-4e10-91a4-f1aaa7de6132\\\",\\\"systemUUID\\\":\\\"5c5136c5-33d2-4bef-9fd7-5251914e4451\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:54Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:54 crc kubenswrapper[4861]: I1003 13:31:54.054181 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://876e806fe7d7313a700bd557fe86fe469146eeb63ecd75684c558d04f6ef5862\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:54Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:54 crc kubenswrapper[4861]: I1003 13:31:54.057301 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:31:54 crc kubenswrapper[4861]: I1003 13:31:54.057322 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:31:54 crc kubenswrapper[4861]: I1003 13:31:54.057329 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:31:54 crc kubenswrapper[4861]: I1003 13:31:54.057341 4861 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:31:54 crc kubenswrapper[4861]: I1003 13:31:54.057349 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:31:54Z","lastTransitionTime":"2025-10-03T13:31:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:31:54 crc kubenswrapper[4861]: I1003 13:31:54.075151 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66ed4999-426b-4615-bfb3-764a3ecc950f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85920cc6705b475966ceb17c3776353ec6cd31730aab339b4e5469034c49264e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://85920cc6705b475966ceb17c3776353ec6cd31730aab339b4e5469034c49264e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5twn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:54Z 
is after 2025-08-24T17:21:41Z" Oct 03 13:31:54 crc kubenswrapper[4861]: E1003 13:31:54.075456 4861 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:31:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:31:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:54Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:31:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:31:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:54Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9c733c76-1447-4e10-91a4-f1aaa7de6132\\\",\\\"systemUUID\\\":\\\"5c5136c5-33d2-4bef-9fd7-5251914e4451\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:54Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:54 crc kubenswrapper[4861]: E1003 13:31:54.075591 4861 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 03 13:31:54 crc kubenswrapper[4861]: I1003 13:31:54.077307 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 03 13:31:54 crc kubenswrapper[4861]: I1003 13:31:54.077350 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:31:54 crc kubenswrapper[4861]: I1003 13:31:54.077361 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:31:54 crc kubenswrapper[4861]: I1003 13:31:54.077377 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:31:54 crc kubenswrapper[4861]: I1003 13:31:54.077799 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:31:54Z","lastTransitionTime":"2025-10-03T13:31:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:31:54 crc kubenswrapper[4861]: I1003 13:31:54.094444 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:54Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:54 crc kubenswrapper[4861]: I1003 13:31:54.109411 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f330284d8d5446236d2bf739c6df75969c865e304c5adab6b1ec40a92baf30d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63b55320840755d0a9c8296d19550ae1d7f5cb2f17d286dddc10a0202963bd98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:54Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:54 crc kubenswrapper[4861]: I1003 13:31:54.124807 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jwgvx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f714b7db-082f-4c2c-8239-ba5df6986c13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://163f5cadc8f9cf8082434639e5dd0dfae5cefc359dbf462b616e4dde476a309f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc
/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b76qk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jwgvx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:54Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:54 crc kubenswrapper[4861]: I1003 13:31:54.138185 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d8335d3f-417e-4114-b306-a3d8f6c31348\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c79706d97ac0c9214aee8c49206bfb27e579a82781b63cf07bd7b9dc43077402\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7prvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://871a1c47b73846e3f28db33691e75b5ed73af7287e81dae4cf2134fd827614b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-de
v/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7prvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-t9slw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:54Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:54 crc kubenswrapper[4861]: I1003 13:31:54.154149 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wm76s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3bf3157b-44d1-4bb3-b185-71523a80c054\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b41a8d9dd0f504f505f1d49f5b7fa39d025f0bc4c49b19ef61691a41bd162b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b41a8d9dd0f504f505f1d49f5b7fa39d025f0bc4c49b19ef61691a41bd162b0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad87b94fe313e12f4f857a69a0a01f6f34877ded4103f2de3015b588522d9904\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad87b94fe313e12f4f857a69a0a01f6f34877ded4103f2de3015b588522d9904\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4343077b8251a65be24e9269fa38bd48d628581ef65b43f7fa262286df7b677d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4343077b8251a65be24e9269fa38bd48d628581ef65b43f7fa262286df7b677d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d10915a62560e9a9e386d4029ccd1c6cd9c99209ab32653437c32a6e5cf7e98d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d10915a62560e9a9e386d4029ccd1c6cd9c99209ab32653437c32a6e5cf7e98d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disa
bled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wm76s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:54Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:54 crc kubenswrapper[4861]: I1003 13:31:54.164799 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-c97s6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e1217d91-8c47-4353-b363-96c9de2cdb56\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70a4ac69623752a83655a58cf44ef00fbf88b0321bc83721fbbe16ea746699c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6zdw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\"
 for pod \"openshift-dns\"/\"node-resolver-c97s6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:54Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:54 crc kubenswrapper[4861]: I1003 13:31:54.181183 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:31:54 crc kubenswrapper[4861]: I1003 13:31:54.181579 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:31:54 crc kubenswrapper[4861]: I1003 13:31:54.181589 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:31:54 crc kubenswrapper[4861]: I1003 13:31:54.181607 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:31:54 crc kubenswrapper[4861]: I1003 13:31:54.181621 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:31:54Z","lastTransitionTime":"2025-10-03T13:31:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:31:54 crc kubenswrapper[4861]: I1003 13:31:54.285190 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:31:54 crc kubenswrapper[4861]: I1003 13:31:54.285224 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:31:54 crc kubenswrapper[4861]: I1003 13:31:54.285253 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:31:54 crc kubenswrapper[4861]: I1003 13:31:54.285268 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:31:54 crc kubenswrapper[4861]: I1003 13:31:54.285281 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:31:54Z","lastTransitionTime":"2025-10-03T13:31:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:31:54 crc kubenswrapper[4861]: I1003 13:31:54.387835 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:31:54 crc kubenswrapper[4861]: I1003 13:31:54.387869 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:31:54 crc kubenswrapper[4861]: I1003 13:31:54.387880 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:31:54 crc kubenswrapper[4861]: I1003 13:31:54.387894 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:31:54 crc kubenswrapper[4861]: I1003 13:31:54.387904 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:31:54Z","lastTransitionTime":"2025-10-03T13:31:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:31:54 crc kubenswrapper[4861]: I1003 13:31:54.490361 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:31:54 crc kubenswrapper[4861]: I1003 13:31:54.490394 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:31:54 crc kubenswrapper[4861]: I1003 13:31:54.490405 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:31:54 crc kubenswrapper[4861]: I1003 13:31:54.490422 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:31:54 crc kubenswrapper[4861]: I1003 13:31:54.490433 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:31:54Z","lastTransitionTime":"2025-10-03T13:31:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:31:54 crc kubenswrapper[4861]: I1003 13:31:54.592860 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:31:54 crc kubenswrapper[4861]: I1003 13:31:54.592887 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:31:54 crc kubenswrapper[4861]: I1003 13:31:54.592894 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:31:54 crc kubenswrapper[4861]: I1003 13:31:54.592907 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:31:54 crc kubenswrapper[4861]: I1003 13:31:54.592916 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:31:54Z","lastTransitionTime":"2025-10-03T13:31:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:31:54 crc kubenswrapper[4861]: I1003 13:31:54.694545 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:31:54 crc kubenswrapper[4861]: I1003 13:31:54.694767 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:31:54 crc kubenswrapper[4861]: I1003 13:31:54.694874 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:31:54 crc kubenswrapper[4861]: I1003 13:31:54.694957 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:31:54 crc kubenswrapper[4861]: I1003 13:31:54.695039 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:31:54Z","lastTransitionTime":"2025-10-03T13:31:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:31:54 crc kubenswrapper[4861]: I1003 13:31:54.797009 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:31:54 crc kubenswrapper[4861]: I1003 13:31:54.797047 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:31:54 crc kubenswrapper[4861]: I1003 13:31:54.797075 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:31:54 crc kubenswrapper[4861]: I1003 13:31:54.797091 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:31:54 crc kubenswrapper[4861]: I1003 13:31:54.797100 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:31:54Z","lastTransitionTime":"2025-10-03T13:31:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:31:54 crc kubenswrapper[4861]: I1003 13:31:54.876315 4861 generic.go:334] "Generic (PLEG): container finished" podID="3bf3157b-44d1-4bb3-b185-71523a80c054" containerID="0cd99ac90e9cf617307233b5899d9da44b563bd5a0969e0a64c4073ee0122b63" exitCode=0 Oct 03 13:31:54 crc kubenswrapper[4861]: I1003 13:31:54.876363 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-wm76s" event={"ID":"3bf3157b-44d1-4bb3-b185-71523a80c054","Type":"ContainerDied","Data":"0cd99ac90e9cf617307233b5899d9da44b563bd5a0969e0a64c4073ee0122b63"} Oct 03 13:31:54 crc kubenswrapper[4861]: I1003 13:31:54.892748 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:54Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:54 crc kubenswrapper[4861]: I1003 13:31:54.899541 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:31:54 crc kubenswrapper[4861]: I1003 13:31:54.899568 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:31:54 crc kubenswrapper[4861]: I1003 13:31:54.899576 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:31:54 crc kubenswrapper[4861]: I1003 13:31:54.899588 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:31:54 crc kubenswrapper[4861]: I1003 13:31:54.899596 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:31:54Z","lastTransitionTime":"2025-10-03T13:31:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:31:54 crc kubenswrapper[4861]: I1003 13:31:54.907312 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f330284d8d5446236d2bf739c6df75969c865e304c5adab6b1ec40a92baf30d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63b55320840755d0a9c8296d19550ae1d7f5cb2f17d286dddc10a0202963bd98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:54Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:54 crc kubenswrapper[4861]: I1003 13:31:54.923116 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jwgvx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f714b7db-082f-4c2c-8239-ba5df6986c13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://163f5cadc8f9cf8082434639e5dd0dfae5cefc359dbf462b616e4dde476a309f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b76qk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jwgvx\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:54Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:54 crc kubenswrapper[4861]: I1003 13:31:54.937493 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d8335d3f-417e-4114-b306-a3d8f6c31348\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c79706d97ac0c9214aee8c49206bfb27e579a82781b63cf07bd7b9dc43077402\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7prvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://871a1c47b73846e3f28db33691e75b5ed73af7287e81dae4cf2134fd827614b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7prvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-t9slw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:54Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:54 crc kubenswrapper[4861]: I1003 13:31:54.952344 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wm76s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3bf3157b-44d1-4bb3-b185-71523a80c054\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b41a8d9dd0f504f505f1d49f5b7fa39d025f0bc4c49b19ef61691a41bd162b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b41a8d9dd0f504f505f1d49f5b7fa39d025f0bc4c49b19ef61691a41bd162b0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad87b94fe313e12f4f857a69a0a01f6f34877ded4103f2de3015b588522d9904\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad87b94fe313e12f4f857a69a0a01f6f34877ded4103f2de3015b588522d9904\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4343077b8251a65be24e9269fa38bd48d628581ef65b43f7fa262286df7b677d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4343077b8251a65be24e9269fa38bd48d628581ef65b43f7fa262286df7b677d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d10915a62560e9a9e386d4029ccd1c6cd9c99209ab32653437c32a6e5cf7e98d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d10915a62560e9a9e386d4029ccd1c6cd9c99209ab32653437c32a6e5cf7e98d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cd99ac90e9cf617307233b5899d9da44b563bd5a0969e0a64c4073ee0122b63\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cd99ac90e9cf617307233b5899d9da44b563bd5a0969e0a64c4073ee0122b63\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wm76s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:54Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:54 crc kubenswrapper[4861]: I1003 13:31:54.962007 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-c97s6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e1217d91-8c47-4353-b363-96c9de2cdb56\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70a4ac69623752a83655a58cf44ef00fbf88b0321bc83721fbbe16ea746699c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6zdw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-c97s6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:54Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:54 crc kubenswrapper[4861]: I1003 13:31:54.976793 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"587ecce6-1ef4-4f74-a2ba-bd6e9fdb84dc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d77b51532e1ed4922634cbfc9360ac49276104c2c3ca115ea522ff423cd7bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://853fb69edcd3e4a27929ab2a6081c40f93553967619663805afb7b626f9c1e39\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75079f3e07d277ab11585e34fc72877ba93a8d0aeaa3f0c8bb214c7c14f9c1b1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd18d89b8b81faa6f8232c33ecaec19aafc3d9574a090cbef37bcf26f83a4bf9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cd18d89b8b81faa6f8232c33ecaec19aafc3d9574a090cbef37bcf26f83a4bf9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 13:31:47.746138 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 13:31:47.746280 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 13:31:47.747035 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2119377140/tls.crt::/tmp/serving-cert-2119377140/tls.key\\\\\\\"\\\\nI1003 13:31:48.538827 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 13:31:48.544908 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 13:31:48.544935 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 13:31:48.545220 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 13:31:48.545275 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 13:31:48.555911 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1003 13:31:48.555947 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 13:31:48.555953 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 13:31:48.555961 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 13:31:48.555964 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 13:31:48.555968 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 13:31:48.555971 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1003 13:31:48.556259 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1003 13:31:48.559989 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb1af1cb5b66706cd0a0da5a3f6b2c380a771100e61f84ca2c85c28f1878f7f6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bdf52752b6cec17ac3d33b9c94d08302ff9e7ff88ab9c23590134a36b1384af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bdf52752b6cec17ac3d33b9c94d08302ff9e7ff88ab9c23590134a36b1384af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:54Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:54 crc kubenswrapper[4861]: I1003 13:31:54.990076 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6d0e9cdc-883e-4b67-afb2-2ef5f4b3246d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01eb05b088e421c220145fd833922351aeba4a520944c6b707039785e26ef303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb9bcc84067a58db80e3c7e1b23825baeaff91f97351e9ada3765b6589fda35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80131782150ecbeb45ec2f55e86909b3735ec4f0b09e27e31f6dfc24d6d4ccd7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a40838be9fe69f9bebecff82c9f10b4c00e167b7f927682e6b18ff490bd10ad4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:54Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:55 crc kubenswrapper[4861]: I1003 13:31:55.001802 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:54Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:55 crc kubenswrapper[4861]: I1003 13:31:55.003789 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:31:55 crc kubenswrapper[4861]: I1003 13:31:55.003822 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:31:55 crc kubenswrapper[4861]: I1003 13:31:55.003834 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:31:55 crc kubenswrapper[4861]: I1003 13:31:55.003851 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:31:55 crc kubenswrapper[4861]: I1003 13:31:55.003862 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:31:55Z","lastTransitionTime":"2025-10-03T13:31:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:31:55 crc kubenswrapper[4861]: I1003 13:31:55.013340 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ceeea9a8c61928b935a6c01f2dda3f9bf0036c2c2792c9338cc580a3296285b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:55Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:55 crc kubenswrapper[4861]: I1003 13:31:55.025021 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:55Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:55 crc kubenswrapper[4861]: I1003 13:31:55.037689 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://876e806fe7d7313a700bd557fe86fe469146eeb63ecd75684c558d04f6ef5862\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:55Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:55 crc kubenswrapper[4861]: I1003 13:31:55.056427 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66ed4999-426b-4615-bfb3-764a3ecc950f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"
}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrid
es\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\
\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85920cc6705b475966ceb17c3776353ec6cd31730aab339b4e5469034c49264e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://85920cc6705b475966ceb17c3776353ec6cd31730aab339b4e5469034c49264e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5twn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:55Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:55 crc kubenswrapper[4861]: I1003 13:31:55.105802 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:31:55 crc kubenswrapper[4861]: I1003 13:31:55.105840 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:31:55 crc kubenswrapper[4861]: I1003 13:31:55.105851 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:31:55 crc kubenswrapper[4861]: I1003 13:31:55.105865 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:31:55 crc kubenswrapper[4861]: I1003 13:31:55.105875 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:31:55Z","lastTransitionTime":"2025-10-03T13:31:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:31:55 crc kubenswrapper[4861]: I1003 13:31:55.208306 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:31:55 crc kubenswrapper[4861]: I1003 13:31:55.208344 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:31:55 crc kubenswrapper[4861]: I1003 13:31:55.208353 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:31:55 crc kubenswrapper[4861]: I1003 13:31:55.208369 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:31:55 crc kubenswrapper[4861]: I1003 13:31:55.208381 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:31:55Z","lastTransitionTime":"2025-10-03T13:31:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:31:55 crc kubenswrapper[4861]: I1003 13:31:55.310372 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:31:55 crc kubenswrapper[4861]: I1003 13:31:55.310407 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:31:55 crc kubenswrapper[4861]: I1003 13:31:55.310417 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:31:55 crc kubenswrapper[4861]: I1003 13:31:55.310440 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:31:55 crc kubenswrapper[4861]: I1003 13:31:55.310449 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:31:55Z","lastTransitionTime":"2025-10-03T13:31:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:31:55 crc kubenswrapper[4861]: I1003 13:31:55.413333 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:31:55 crc kubenswrapper[4861]: I1003 13:31:55.413378 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:31:55 crc kubenswrapper[4861]: I1003 13:31:55.413389 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:31:55 crc kubenswrapper[4861]: I1003 13:31:55.413410 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:31:55 crc kubenswrapper[4861]: I1003 13:31:55.413422 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:31:55Z","lastTransitionTime":"2025-10-03T13:31:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:31:55 crc kubenswrapper[4861]: I1003 13:31:55.441768 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 13:31:55 crc kubenswrapper[4861]: I1003 13:31:55.441870 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 13:31:55 crc kubenswrapper[4861]: I1003 13:31:55.441898 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 13:31:55 crc kubenswrapper[4861]: E1003 13:31:55.441926 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 13:32:03.441906579 +0000 UTC m=+37.439891626 (durationBeforeRetry 8s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:31:55 crc kubenswrapper[4861]: I1003 13:31:55.441949 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 13:31:55 crc kubenswrapper[4861]: E1003 13:31:55.441981 4861 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 03 13:31:55 crc kubenswrapper[4861]: E1003 13:31:55.442014 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-03 13:32:03.442006981 +0000 UTC m=+37.439992028 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 03 13:31:55 crc kubenswrapper[4861]: E1003 13:31:55.442014 4861 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 03 13:31:55 crc kubenswrapper[4861]: I1003 13:31:55.441979 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 13:31:55 crc kubenswrapper[4861]: E1003 13:31:55.442039 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-03 13:32:03.442033162 +0000 UTC m=+37.440018209 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 03 13:31:55 crc kubenswrapper[4861]: E1003 13:31:55.442062 4861 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 03 13:31:55 crc kubenswrapper[4861]: E1003 13:31:55.442157 4861 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 03 13:31:55 crc kubenswrapper[4861]: E1003 13:31:55.442176 4861 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 13:31:55 crc kubenswrapper[4861]: E1003 13:31:55.442081 4861 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 03 13:31:55 crc kubenswrapper[4861]: E1003 13:31:55.442259 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-03 13:32:03.442219727 +0000 UTC m=+37.440204774 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 13:31:55 crc kubenswrapper[4861]: E1003 13:31:55.442268 4861 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 03 13:31:55 crc kubenswrapper[4861]: E1003 13:31:55.442284 4861 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 13:31:55 crc kubenswrapper[4861]: E1003 13:31:55.442316 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-03 13:32:03.442307519 +0000 UTC m=+37.440292566 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 13:31:55 crc kubenswrapper[4861]: I1003 13:31:55.515451 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:31:55 crc kubenswrapper[4861]: I1003 13:31:55.515500 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:31:55 crc kubenswrapper[4861]: I1003 13:31:55.515510 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:31:55 crc kubenswrapper[4861]: I1003 13:31:55.515527 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:31:55 crc kubenswrapper[4861]: I1003 13:31:55.515537 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:31:55Z","lastTransitionTime":"2025-10-03T13:31:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:31:55 crc kubenswrapper[4861]: I1003 13:31:55.617725 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:31:55 crc kubenswrapper[4861]: I1003 13:31:55.617772 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:31:55 crc kubenswrapper[4861]: I1003 13:31:55.617785 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:31:55 crc kubenswrapper[4861]: I1003 13:31:55.617803 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:31:55 crc kubenswrapper[4861]: I1003 13:31:55.617815 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:31:55Z","lastTransitionTime":"2025-10-03T13:31:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:31:55 crc kubenswrapper[4861]: I1003 13:31:55.680489 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 13:31:55 crc kubenswrapper[4861]: I1003 13:31:55.680564 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 13:31:55 crc kubenswrapper[4861]: I1003 13:31:55.680588 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 13:31:55 crc kubenswrapper[4861]: E1003 13:31:55.680641 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 13:31:55 crc kubenswrapper[4861]: E1003 13:31:55.680691 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 13:31:55 crc kubenswrapper[4861]: E1003 13:31:55.680738 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 13:31:55 crc kubenswrapper[4861]: I1003 13:31:55.719497 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:31:55 crc kubenswrapper[4861]: I1003 13:31:55.719532 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:31:55 crc kubenswrapper[4861]: I1003 13:31:55.719541 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:31:55 crc kubenswrapper[4861]: I1003 13:31:55.719554 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:31:55 crc kubenswrapper[4861]: I1003 13:31:55.719563 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:31:55Z","lastTransitionTime":"2025-10-03T13:31:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:31:55 crc kubenswrapper[4861]: I1003 13:31:55.823613 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:31:55 crc kubenswrapper[4861]: I1003 13:31:55.824218 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:31:55 crc kubenswrapper[4861]: I1003 13:31:55.824252 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:31:55 crc kubenswrapper[4861]: I1003 13:31:55.824276 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:31:55 crc kubenswrapper[4861]: I1003 13:31:55.824290 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:31:55Z","lastTransitionTime":"2025-10-03T13:31:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:31:55 crc kubenswrapper[4861]: I1003 13:31:55.883600 4861 generic.go:334] "Generic (PLEG): container finished" podID="3bf3157b-44d1-4bb3-b185-71523a80c054" containerID="b20088a595b5a59d1a0339827c7dd169c479a530aed875ac3eeb021b78269490" exitCode=0 Oct 03 13:31:55 crc kubenswrapper[4861]: I1003 13:31:55.883677 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-wm76s" event={"ID":"3bf3157b-44d1-4bb3-b185-71523a80c054","Type":"ContainerDied","Data":"b20088a595b5a59d1a0339827c7dd169c479a530aed875ac3eeb021b78269490"} Oct 03 13:31:55 crc kubenswrapper[4861]: I1003 13:31:55.900135 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6d0e9cdc-883e-4b67-afb2-2ef5f4b3246d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01eb05b088e421c220145fd833922351aeba4a520944c6b707039785e26ef303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb9bcc84067a58db80e3c7e1b23825baeaff91f97351e9ada3765b6589fda35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80131782150ecbeb45ec2f55e86909b3735ec4f0b09e27e31f6dfc24d6d4ccd7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a57
8bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a40838be9fe69f9bebecff82c9f10b4c00e167b7f927682e6b18ff490bd10ad4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:55Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:55 crc kubenswrapper[4861]: I1003 13:31:55.916031 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"587ecce6-1ef4-4f74-a2ba-bd6e9fdb84dc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d77b51532e1ed4922634cbfc9360ac49276104c2c3ca115ea522ff423cd7bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://853fb69edcd3e4a27929ab2a6081c40f93553967619663805afb7b626f9c1e39\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75079f3e07d277ab11585e34fc72877ba93a8d0aeaa3f0c8bb214c7c14f9c1b1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd18d89b8b81faa6f8232c33ecaec19aafc3d9574a090cbef37bcf26f83a4bf9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cd18d89b8b81faa6f8232c33ecaec19aafc3d9574a090cbef37bcf26f83a4bf9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 13:31:47.746138 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 13:31:47.746280 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 13:31:47.747035 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2119377140/tls.crt::/tmp/serving-cert-2119377140/tls.key\\\\\\\"\\\\nI1003 13:31:48.538827 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 13:31:48.544908 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 13:31:48.544935 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 13:31:48.545220 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 13:31:48.545275 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 13:31:48.555911 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1003 13:31:48.555947 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 13:31:48.555953 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 13:31:48.555961 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 13:31:48.555964 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 13:31:48.555968 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 13:31:48.555971 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1003 13:31:48.556259 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1003 13:31:48.559989 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb1af1cb5b66706cd0a0da5a3f6b2c380a771100e61f84ca2c85c28f1878f7f6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bdf52752b6cec17ac3d33b9c94d08302ff9e7ff88ab9c23590134a36b1384af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bdf52752b6cec17ac3d33b9c94d08302ff9e7ff88ab9c23590134a36b1384af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:55Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:55 crc kubenswrapper[4861]: I1003 13:31:55.927072 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:31:55 crc kubenswrapper[4861]: I1003 13:31:55.927104 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:31:55 crc kubenswrapper[4861]: I1003 13:31:55.927112 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:31:55 crc kubenswrapper[4861]: I1003 13:31:55.927125 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:31:55 crc kubenswrapper[4861]: I1003 13:31:55.927134 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:31:55Z","lastTransitionTime":"2025-10-03T13:31:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:31:55 crc kubenswrapper[4861]: I1003 13:31:55.930243 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:55Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:55 crc kubenswrapper[4861]: I1003 13:31:55.946398 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ceeea9a8c61928b935a6c01f2dda3f9bf0036c2c2792c9338cc580a3296285b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:55Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:55 crc kubenswrapper[4861]: I1003 13:31:55.959458 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:55Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:55 crc kubenswrapper[4861]: I1003 13:31:55.971585 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://876e806fe7d7313a700bd557fe86fe469146eeb63ecd75684c558d04f6ef5862\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:55Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:55 crc kubenswrapper[4861]: I1003 13:31:55.989523 4861 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66ed4999-426b-4615-bfb3-764a3ecc950f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkub
e-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d7732574
53265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85920cc6705b475966ceb17c3776353ec6cd31730aab339b4e5469034c49264e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\
\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://85920cc6705b475966ceb17c3776353ec6cd31730aab339b4e5469034c49264e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5twn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:55Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:56 crc kubenswrapper[4861]: I1003 13:31:56.004367 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f330284d8d5446236d2bf739c6df75969c865e304c5adab6b1ec40a92baf30d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63b55320840755d0a9c8296d19550ae1d7f5cb2f17d286dddc10a0202963bd98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257
453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:56Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:56 crc kubenswrapper[4861]: I1003 13:31:56.019251 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jwgvx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f714b7db-082f-4c2c-8239-ba5df6986c13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://163f5cadc8f9cf8082434639e5dd0dfae5cefc359dbf462b616e4dde476a309f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\"
,\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b76qk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jwgvx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:56Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:56 crc kubenswrapper[4861]: I1003 13:31:56.031624 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:31:56 crc kubenswrapper[4861]: I1003 13:31:56.031654 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:31:56 crc kubenswrapper[4861]: I1003 13:31:56.031662 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:31:56 crc kubenswrapper[4861]: I1003 13:31:56.031710 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:31:56 crc kubenswrapper[4861]: I1003 13:31:56.031723 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:31:56Z","lastTransitionTime":"2025-10-03T13:31:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:31:56 crc kubenswrapper[4861]: I1003 13:31:56.036777 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d8335d3f-417e-4114-b306-a3d8f6c31348\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c79706d97ac0c9214aee8c49206bfb27e579a82781b63cf07bd7b9dc43077402\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7prvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://871a1c47b73846e3f28db33691e75b5ed73af7287e81dae4cf2134fd827614b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7prvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-t9slw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:56Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:56 crc kubenswrapper[4861]: I1003 13:31:56.054411 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:56Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:56 crc kubenswrapper[4861]: I1003 13:31:56.071887 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wm76s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3bf3157b-44d1-4bb3-b185-71523a80c054\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b41a8d9dd0f504f505f1d49f5b7fa39d025f0bc4c49b19ef61691a41bd162b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b41a8d9dd0f504f505f1d49f5b7fa39d025f0bc4c49b19ef61691a41bd162b0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad87b94fe313e12f4f857a69a0a01f6f34877ded4103f2de3015b588522d9904\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad87b94fe313e12f4f857a69a0a01f6f34877ded4103f2de3015b588522d9904\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4343077b8251a65be24e9269fa38bd48d628581ef65b43f7fa262286df7b677d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4343077b8251a65be24e9269fa38bd48d628581ef65b43f7fa262286df7b677d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d10915a62560e9a9e386d4029ccd1c6cd9c99209ab32653437c32a6e5cf7e98d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d10915a62560e9a9e386d4029ccd1c6cd9c99209ab32653437c32a6e5cf7e98d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"D
isabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cd99ac90e9cf617307233b5899d9da44b563bd5a0969e0a64c4073ee0122b63\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cd99ac90e9cf617307233b5899d9da44b563bd5a0969e0a64c4073ee0122b63\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b20088a595b5a59d1a0339827c7dd169c479a530aed875ac3eeb021b78269490\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b20088a595b5a59d1a0339827c7dd169c479a530aed875ac3eeb021b78269490\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wm76s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:56Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:56 crc kubenswrapper[4861]: I1003 13:31:56.084436 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-c97s6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e1217d91-8c47-4353-b363-96c9de2cdb56\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70a4ac69623752a83655a58cf44ef00fbf88b0321bc83721fbbe16ea746699c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6zdw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-c97s6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:56Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:56 crc kubenswrapper[4861]: I1003 13:31:56.134919 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:31:56 crc kubenswrapper[4861]: I1003 13:31:56.134981 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:31:56 crc kubenswrapper[4861]: I1003 13:31:56.134991 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:31:56 crc kubenswrapper[4861]: I1003 13:31:56.135008 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:31:56 crc kubenswrapper[4861]: I1003 13:31:56.135021 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:31:56Z","lastTransitionTime":"2025-10-03T13:31:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:31:56 crc kubenswrapper[4861]: I1003 13:31:56.237255 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:31:56 crc kubenswrapper[4861]: I1003 13:31:56.237514 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:31:56 crc kubenswrapper[4861]: I1003 13:31:56.237601 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:31:56 crc kubenswrapper[4861]: I1003 13:31:56.237692 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:31:56 crc kubenswrapper[4861]: I1003 13:31:56.237765 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:31:56Z","lastTransitionTime":"2025-10-03T13:31:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:31:56 crc kubenswrapper[4861]: I1003 13:31:56.241767 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-n974h"] Oct 03 13:31:56 crc kubenswrapper[4861]: I1003 13:31:56.242141 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-n974h" Oct 03 13:31:56 crc kubenswrapper[4861]: I1003 13:31:56.244104 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Oct 03 13:31:56 crc kubenswrapper[4861]: I1003 13:31:56.245076 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Oct 03 13:31:56 crc kubenswrapper[4861]: I1003 13:31:56.245317 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Oct 03 13:31:56 crc kubenswrapper[4861]: I1003 13:31:56.246045 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Oct 03 13:31:56 crc kubenswrapper[4861]: I1003 13:31:56.256945 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:56Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:56 crc kubenswrapper[4861]: I1003 13:31:56.271473 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f330284d8d5446236d2bf739c6df75969c865e304c5adab6b1ec40a92baf30d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63b55320840755d0a9c8296d19550ae1d7f5cb2f17d286dddc10a0202963bd98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imag
eID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:56Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:56 crc kubenswrapper[4861]: I1003 13:31:56.284645 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jwgvx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f714b7db-082f-4c2c-8239-ba5df6986c13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://163f5cadc8f9cf8082434639e5dd0dfae5cefc359dbf462b616e4dde476a309f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\"
:\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b76qk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jwgvx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:56Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:56 crc kubenswrapper[4861]: I1003 13:31:56.295864 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d8335d3f-417e-4114-b306-a3d8f6c31348\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c79706d97ac0c9214aee8c49206bfb27e579a82781b63cf07bd7b9dc43077402\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7prvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://871a1c47b73846e3f28db33691e75b5ed73af7287e81dae4cf2134fd827614b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7prvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-t9slw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:56Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:56 crc kubenswrapper[4861]: I1003 13:31:56.310735 4861 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wm76s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3bf3157b-44d1-4bb3-b185-71523a80c054\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b41a8d9dd0f504f505f1d49f5b7fa39d025f0bc4c49b19ef61691a41bd162b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b41a8d9dd0f504f505f1d49f5b7fa39d025f0bc4c49b19ef61691a41bd162b0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad87b94fe313e12f4f857a69a0a01f6f34877ded4103f2de3015b588522d9904\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a168
8df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad87b94fe313e12f4f857a69a0a01f6f34877ded4103f2de3015b588522d9904\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4343077b8251a65be24e9269fa38bd48d628581ef65b43f7fa262286df7b677d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4343077b8251a65be24e9269fa38bd48d628581ef65b43f7fa262286df7b677d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d10915a62560e9a9e386d4029ccd1c6cd9c99209ab32653437c32a6e5cf7e98d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d10915a62560e9a9e386d4029ccd1c6cd9c99209ab32653437c32a6e5cf7e98d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"
/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cd99ac90e9cf617307233b5899d9da44b563bd5a0969e0a64c4073ee0122b63\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cd99ac90e9cf617307233b5899d9da44b563bd5a0969e0a64c4073ee0122b63\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b20088a595b5a59d1a0339827c7dd169c479a530aed875ac3eeb021b78269490\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b20088a595b5a59d1a0339827c7dd169c479a530aed875ac3eeb021b78269490\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wm76s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:56Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:56 crc kubenswrapper[4861]: I1003 13:31:56.322267 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-c97s6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e1217d91-8c47-4353-b363-96c9de2cdb56\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70a4ac69623752a83655a58cf44ef00fbf88b0321bc83721fbbe16ea746699c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6zdw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-c97s6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:56Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:56 crc kubenswrapper[4861]: I1003 13:31:56.335962 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"587ecce6-1ef4-4f74-a2ba-bd6e9fdb84dc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d77b51532e1ed4922634cbfc9360ac49276104c2c3ca115ea522ff423cd7bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://853fb69edcd3e4a27929ab2a6081c40f93553967619663805afb7b626f9c1e39\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75079f3e07d277ab11585e34fc72877ba93a8d0aeaa3f0c8bb214c7c14f9c1b1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd18d89b8b81faa6f8232c33ecaec19aafc3d9574a090cbef37bcf26f83a4bf9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cd18d89b8b81faa6f8232c33ecaec19aafc3d9574a090cbef37bcf26f83a4bf9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 13:31:47.746138 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 13:31:47.746280 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 13:31:47.747035 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2119377140/tls.crt::/tmp/serving-cert-2119377140/tls.key\\\\\\\"\\\\nI1003 13:31:48.538827 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 13:31:48.544908 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 13:31:48.544935 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 13:31:48.545220 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 13:31:48.545275 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 13:31:48.555911 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1003 13:31:48.555947 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 13:31:48.555953 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 13:31:48.555961 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 13:31:48.555964 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 13:31:48.555968 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 13:31:48.555971 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1003 13:31:48.556259 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1003 13:31:48.559989 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb1af1cb5b66706cd0a0da5a3f6b2c380a771100e61f84ca2c85c28f1878f7f6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bdf52752b6cec17ac3d33b9c94d08302ff9e7ff88ab9c23590134a36b1384af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bdf52752b6cec17ac3d33b9c94d08302ff9e7ff88ab9c23590134a36b1384af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:56Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:56 crc kubenswrapper[4861]: I1003 13:31:56.339580 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:31:56 crc kubenswrapper[4861]: I1003 13:31:56.339601 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:31:56 crc kubenswrapper[4861]: I1003 13:31:56.339609 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:31:56 crc kubenswrapper[4861]: I1003 13:31:56.339621 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:31:56 crc kubenswrapper[4861]: I1003 13:31:56.339629 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:31:56Z","lastTransitionTime":"2025-10-03T13:31:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:31:56 crc kubenswrapper[4861]: I1003 13:31:56.349708 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6d0e9cdc-883e-4b67-afb2-2ef5f4b3246d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01eb05b088e421c220145fd833922351aeba4a520944c6b707039785e26ef303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb9bcc84067a58db80e3c7e1b23825baeaff91f97351e9ada3765b6589fda35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80131782150ecbeb45ec2f55e86909b3735ec4f0b09e27e31f6dfc24d6d4ccd7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28
Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a40838be9fe69f9bebecff82c9f10b4c00e167b7f927682e6b18ff490bd10ad4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:56Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:56 crc kubenswrapper[4861]: I1003 13:31:56.351848 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/25670d98-45f4-4308-9576-f6f532c422ec-serviceca\") pod \"node-ca-n974h\" (UID: \"25670d98-45f4-4308-9576-f6f532c422ec\") " pod="openshift-image-registry/node-ca-n974h" Oct 03 13:31:56 crc kubenswrapper[4861]: I1003 13:31:56.351880 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/25670d98-45f4-4308-9576-f6f532c422ec-host\") pod \"node-ca-n974h\" (UID: \"25670d98-45f4-4308-9576-f6f532c422ec\") " pod="openshift-image-registry/node-ca-n974h" Oct 03 13:31:56 crc kubenswrapper[4861]: I1003 13:31:56.351916 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s2cs7\" (UniqueName: \"kubernetes.io/projected/25670d98-45f4-4308-9576-f6f532c422ec-kube-api-access-s2cs7\") pod \"node-ca-n974h\" (UID: \"25670d98-45f4-4308-9576-f6f532c422ec\") " pod="openshift-image-registry/node-ca-n974h" Oct 03 13:31:56 crc kubenswrapper[4861]: I1003 13:31:56.361060 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-n974h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"25670d98-45f4-4308-9576-f6f532c422ec\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:56Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:56Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2cs7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:56Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-n974h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:56Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:56 crc kubenswrapper[4861]: I1003 13:31:56.376150 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:56Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:56 crc kubenswrapper[4861]: I1003 13:31:56.388077 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://876e806fe7d7313a700bd557fe86fe469146eeb63ecd75684c558d04f6ef5862\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:56Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:56 crc kubenswrapper[4861]: I1003 13:31:56.405321 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66ed4999-426b-4615-bfb3-764a3ecc950f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"
}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrid
es\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\
\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85920cc6705b475966ceb17c3776353ec6cd31730aab339b4e5469034c49264e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://85920cc6705b475966ceb17c3776353ec6cd31730aab339b4e5469034c49264e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5twn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:56Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:56 crc kubenswrapper[4861]: I1003 13:31:56.418885 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:56Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:56 crc kubenswrapper[4861]: I1003 13:31:56.431378 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ceeea9a8c61928b935a6c01f2dda3f9bf0036c2c2792c9338cc580a3296285b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:56Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:56 crc kubenswrapper[4861]: I1003 13:31:56.444056 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:31:56 crc kubenswrapper[4861]: I1003 13:31:56.444102 4861 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:31:56 crc kubenswrapper[4861]: I1003 13:31:56.444114 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:31:56 crc kubenswrapper[4861]: I1003 13:31:56.444130 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:31:56 crc kubenswrapper[4861]: I1003 13:31:56.444148 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:31:56Z","lastTransitionTime":"2025-10-03T13:31:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:31:56 crc kubenswrapper[4861]: I1003 13:31:56.452379 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/25670d98-45f4-4308-9576-f6f532c422ec-serviceca\") pod \"node-ca-n974h\" (UID: \"25670d98-45f4-4308-9576-f6f532c422ec\") " pod="openshift-image-registry/node-ca-n974h" Oct 03 13:31:56 crc kubenswrapper[4861]: I1003 13:31:56.452412 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/25670d98-45f4-4308-9576-f6f532c422ec-host\") pod \"node-ca-n974h\" (UID: \"25670d98-45f4-4308-9576-f6f532c422ec\") " pod="openshift-image-registry/node-ca-n974h" Oct 03 13:31:56 crc kubenswrapper[4861]: I1003 13:31:56.452458 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2cs7\" (UniqueName: \"kubernetes.io/projected/25670d98-45f4-4308-9576-f6f532c422ec-kube-api-access-s2cs7\") pod \"node-ca-n974h\" (UID: \"25670d98-45f4-4308-9576-f6f532c422ec\") " pod="openshift-image-registry/node-ca-n974h" Oct 03 13:31:56 crc kubenswrapper[4861]: I1003 13:31:56.452594 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/25670d98-45f4-4308-9576-f6f532c422ec-host\") pod \"node-ca-n974h\" (UID: \"25670d98-45f4-4308-9576-f6f532c422ec\") " pod="openshift-image-registry/node-ca-n974h" Oct 03 13:31:56 crc kubenswrapper[4861]: I1003 13:31:56.453426 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/25670d98-45f4-4308-9576-f6f532c422ec-serviceca\") pod \"node-ca-n974h\" (UID: \"25670d98-45f4-4308-9576-f6f532c422ec\") " pod="openshift-image-registry/node-ca-n974h" Oct 03 13:31:56 crc kubenswrapper[4861]: I1003 13:31:56.543049 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2cs7\" (UniqueName: \"kubernetes.io/projected/25670d98-45f4-4308-9576-f6f532c422ec-kube-api-access-s2cs7\") pod \"node-ca-n974h\" (UID: \"25670d98-45f4-4308-9576-f6f532c422ec\") " pod="openshift-image-registry/node-ca-n974h" Oct 03 13:31:56 crc kubenswrapper[4861]: I1003 13:31:56.546874 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:31:56 crc kubenswrapper[4861]: I1003 13:31:56.546925 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:31:56 crc kubenswrapper[4861]: I1003 13:31:56.546934 4861 kubelet_node_status.go:724] "Recording event message 
for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:31:56 crc kubenswrapper[4861]: I1003 13:31:56.546947 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:31:56 crc kubenswrapper[4861]: I1003 13:31:56.546957 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:31:56Z","lastTransitionTime":"2025-10-03T13:31:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:31:56 crc kubenswrapper[4861]: I1003 13:31:56.555433 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-n974h" Oct 03 13:31:56 crc kubenswrapper[4861]: W1003 13:31:56.568205 4861 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod25670d98_45f4_4308_9576_f6f532c422ec.slice/crio-3c69ac8822dc906fbab777a7e5726cbfa38d2519cad42e2017dbd3962bfe707b WatchSource:0}: Error finding container 3c69ac8822dc906fbab777a7e5726cbfa38d2519cad42e2017dbd3962bfe707b: Status 404 returned error can't find the container with id 3c69ac8822dc906fbab777a7e5726cbfa38d2519cad42e2017dbd3962bfe707b Oct 03 13:31:56 crc kubenswrapper[4861]: I1003 13:31:56.649137 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:31:56 crc kubenswrapper[4861]: I1003 13:31:56.649171 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:31:56 crc kubenswrapper[4861]: I1003 13:31:56.649180 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:31:56 crc kubenswrapper[4861]: I1003 13:31:56.649193 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:31:56 crc kubenswrapper[4861]: I1003 13:31:56.649202 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:31:56Z","lastTransitionTime":"2025-10-03T13:31:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:31:56 crc kubenswrapper[4861]: I1003 13:31:56.692303 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:56Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:56 crc kubenswrapper[4861]: I1003 13:31:56.704624 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ceeea9a8c61928b935a6c01f2dda3f9bf0036c2c2792c9338cc580a3296285b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:56Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:56 crc kubenswrapper[4861]: I1003 13:31:56.716726 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:56Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:56 crc kubenswrapper[4861]: I1003 13:31:56.732443 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://876e806fe7d7313a700bd557fe86fe469146eeb63ecd75684c558d04f6ef5862\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:56Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:56 crc kubenswrapper[4861]: I1003 13:31:56.751328 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 03 13:31:56 crc kubenswrapper[4861]: I1003 13:31:56.751366 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:31:56 crc kubenswrapper[4861]: I1003 13:31:56.751375 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:31:56 crc kubenswrapper[4861]: I1003 13:31:56.751390 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:31:56 crc kubenswrapper[4861]: I1003 13:31:56.751401 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:31:56Z","lastTransitionTime":"2025-10-03T13:31:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:31:56 crc kubenswrapper[4861]: I1003 13:31:56.752659 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66ed4999-426b-4615-bfb3-764a3ecc950f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85920cc6705b475966ceb17c3776353ec6cd31730aab339b4e5469034c49264e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://85920cc6705b475966ceb17c3776353ec6cd31730aab339b4e5469034c49264e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5twn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:56Z 
is after 2025-08-24T17:21:41Z" Oct 03 13:31:56 crc kubenswrapper[4861]: I1003 13:31:56.766894 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f330284d8d5446236d2bf739c6df75969c865e304c5adab6b1ec40a92baf30d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63b55320840755d0a9c8296d19550ae1d7f5cb2f17d286dddc10a0202963bd98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:56Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:56 crc kubenswrapper[4861]: I1003 13:31:56.784536 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jwgvx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f714b7db-082f-4c2c-8239-ba5df6986c13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://163f5cadc8f9cf8082434639e5dd0dfae5cefc359dbf462b616e4dde476a309f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b76qk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jwgvx\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:56Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:56 crc kubenswrapper[4861]: I1003 13:31:56.797166 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d8335d3f-417e-4114-b306-a3d8f6c31348\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c79706d97ac0c9214aee8c49206bfb27e579a82781b63cf07bd7b9dc43077402\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7prvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://871a1c47b73846e3f28db33691e75b5ed73af7287e81dae4cf2134fd827614b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7prvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-t9slw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:56Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:56 crc kubenswrapper[4861]: I1003 13:31:56.812732 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:56Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:56 crc kubenswrapper[4861]: I1003 13:31:56.828204 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wm76s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3bf3157b-44d1-4bb3-b185-71523a80c054\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b41a8d9dd0f504f505f1d49f5b7fa39d025f0bc4c49b19ef61691a41bd162b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b41a8d9dd0f504f505f1d49f5b7fa39d025f0bc4c49b19ef61691a41bd162b0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad87b94fe313e12f4f857a69a0a01f6f34877ded4103f2de3015b588522d9904\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad87b94fe313e12f4f857a69a0a01f6f34877ded4103f2de3015b588522d9904\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4343077b8251a65be24e9269fa38bd48d628581ef65b43f7fa262286df7b677d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4343077b8251a65be24e9269fa38bd48d628581ef65b43f7fa262286df7b677d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d10915a62560e9a9e386d4029ccd1c6cd9c99209ab32653437c32a6e5cf7e98d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d10915a62560e9a9e386d4029ccd1c6cd9c99209ab32653437c32a6e5cf7e98d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cd99ac90e9cf617307233b5899d9da44b563bd5a0969e0a64c4073ee0122b63\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cd99ac90e9cf617307233b5899d9da44b563bd5a0969e0a64c4073ee0122b63\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b20088a595b5a59d1a0339827c7dd169c479a530aed875ac3eeb021b78269490\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b20088a595b5a59d1a0339827c7dd169c479a530aed875ac3eeb021b78269490\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wm76s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:56Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:56 crc kubenswrapper[4861]: I1003 13:31:56.840776 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-c97s6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e1217d91-8c47-4353-b363-96c9de2cdb56\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70a4ac69623752a83655a58cf44ef00fbf88b0321bc83721fbbe16ea746699c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6zdw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-c97s6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:56Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:56 crc kubenswrapper[4861]: I1003 13:31:56.855607 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:31:56 crc kubenswrapper[4861]: I1003 13:31:56.855674 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:31:56 crc kubenswrapper[4861]: I1003 13:31:56.855702 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:31:56 crc kubenswrapper[4861]: I1003 13:31:56.855727 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:31:56 crc kubenswrapper[4861]: I1003 13:31:56.855742 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:31:56Z","lastTransitionTime":"2025-10-03T13:31:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:31:56 crc kubenswrapper[4861]: I1003 13:31:56.856464 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6d0e9cdc-883e-4b67-afb2-2ef5f4b3246d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01eb05b088e421c220145fd833922351aeba4a520944c6b707039785e26ef303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb9bcc84067a58db80e3c7e1b23825baeaff91f97351e9ada3765b6589fda35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80131782150ecbeb45ec2f55e86909b3735ec4f0b09e27e31f6dfc24d6d4ccd7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resour
ces\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a40838be9fe69f9bebecff82c9f10b4c00e167b7f927682e6b18ff490bd10ad4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:56Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:56 crc kubenswrapper[4861]: I1003 13:31:56.869200 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-n974h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"25670d98-45f4-4308-9576-f6f532c422ec\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:56Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:56Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2cs7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:56Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-n974h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:56Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:56 crc kubenswrapper[4861]: I1003 13:31:56.887208 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"587ecce6-1ef4-4f74-a2ba-bd6e9fdb84dc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d77b51532e1ed4922634cbfc9360ac49276104c2c3ca115ea522ff423cd7bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://853fb69edcd3e4a27929ab2a6081c40f93553967619663805afb7b626f9c1e39\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75079f3e07d277ab11585e34fc72877ba93a8d0aeaa3f0c8bb214c7c14f9c1b1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd18d89b8b81faa6f8232c33ecaec19aafc3d9574a090cbef37bcf26f83a4bf9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cd18d89b8b81faa6f8232c33ecaec19aafc3d9574a090cbef37bcf26f83a4bf9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 13:31:47.746138 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 13:31:47.746280 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 13:31:47.747035 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2119377140/tls.crt::/tmp/serving-cert-2119377140/tls.key\\\\\\\"\\\\nI1003 13:31:48.538827 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 13:31:48.544908 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 13:31:48.544935 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 13:31:48.545220 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 13:31:48.545275 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 13:31:48.555911 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1003 13:31:48.555947 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 13:31:48.555953 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 13:31:48.555961 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 13:31:48.555964 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 13:31:48.555968 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 13:31:48.555971 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1003 13:31:48.556259 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1003 13:31:48.559989 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb1af1cb5b66706cd0a0da5a3f6b2c380a771100e61f84ca2c85c28f1878f7f6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bdf52752b6cec17ac3d33b9c94d08302ff9e7ff88ab9c23590134a36b1384af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bdf52752b6cec17ac3d33b9c94d08302ff9e7ff88ab9c23590134a36b1384af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:56Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:56 crc kubenswrapper[4861]: I1003 13:31:56.894049 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" event={"ID":"66ed4999-426b-4615-bfb3-764a3ecc950f","Type":"ContainerStarted","Data":"89687e56af7bdba3d942add0c731be946a5aa1b19d9cd6db2fd80c30de87c15c"} Oct 03 13:31:56 crc kubenswrapper[4861]: I1003 13:31:56.894329 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" Oct 03 13:31:56 crc kubenswrapper[4861]: I1003 13:31:56.895013 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-n974h" event={"ID":"25670d98-45f4-4308-9576-f6f532c422ec","Type":"ContainerStarted","Data":"3c69ac8822dc906fbab777a7e5726cbfa38d2519cad42e2017dbd3962bfe707b"} Oct 03 13:31:56 crc kubenswrapper[4861]: I1003 13:31:56.906374 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:56Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:56 crc kubenswrapper[4861]: I1003 13:31:56.918038 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ceeea9a8c61928b935a6c01f2dda3f9bf0036c2c2792c9338cc580a3296285b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:56Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:56 crc kubenswrapper[4861]: I1003 13:31:56.926892 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" Oct 03 13:31:56 crc kubenswrapper[4861]: I1003 13:31:56.931137 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:56Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:56 crc kubenswrapper[4861]: I1003 13:31:56.944005 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://876e806fe7d7313a700bd557fe86fe469146eeb63ecd75684c558d04f6ef5862\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:56Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:56 crc kubenswrapper[4861]: I1003 13:31:56.961294 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:31:56 crc kubenswrapper[4861]: I1003 13:31:56.961336 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:31:56 crc kubenswrapper[4861]: I1003 13:31:56.961351 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:31:56 crc kubenswrapper[4861]: I1003 13:31:56.961371 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:31:56 crc kubenswrapper[4861]: I1003 13:31:56.961391 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:31:56Z","lastTransitionTime":"2025-10-03T13:31:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:31:56 crc kubenswrapper[4861]: I1003 13:31:56.962732 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66ed4999-426b-4615-bfb3-764a3ecc950f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a6dd78f8d0e99d19d90df2672bd0a66e48195ab147e3821b110c5b9b13fff935\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d7403a686a403bd13b8c7040a8d54e47ea882e532dbde51ff960cf2b4a7dc84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dadd381cc9fb8f216611723c7f3113272fdd37e424ab087ae2b516b1282c724\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c11d2168c2a8a146f93a9048c50a0a7da936f36039a924e2e0c946f571ac6d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://17d9e0e142062e93034c5f825e1229664112d38443d5843713cac6e077737c48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a49ed8048fd561e10cc87dfa9b39d3ff2123f2cc65f9b4402bba6bf01d161213\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89687e56af7bdba3d942add0c731be946a5aa1b1
9d9cd6db2fd80c30de87c15c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2ee45e3f91ddde95e7bdf26aed6afb1d69eb3dbbdad136c66a51a2a3a325984\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccou
nt\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85920cc6705b475966ceb17c3776353ec6cd31730aab339b4e5469034c49264e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://85920cc6705b475966ceb17c3776353ec6cd31730aab339b4e5469034c49264e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5twn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:56Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:56 crc kubenswrapper[4861]: I1003 13:31:56.978643 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f330284d8d5446236d2bf739c6df75969c865e304c5adab6b1ec40a92baf30d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63b55320840755d0a9c8296d19550ae1d7f5cb2f17d286dddc10a0202963bd98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:56Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:56 crc kubenswrapper[4861]: I1003 13:31:56.991184 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jwgvx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f714b7db-082f-4c2c-8239-ba5df6986c13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://163f5cadc8f9cf8082434639e5dd0dfae5cefc359dbf462b616e4dde476a309f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b76qk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jwgvx\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:56Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:57 crc kubenswrapper[4861]: I1003 13:31:57.003274 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d8335d3f-417e-4114-b306-a3d8f6c31348\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c79706d97ac0c9214aee8c49206bfb27e579a82781b63cf07bd7b9dc43077402\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7prvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://871a1c47b73846e3f28db33691e75b5ed73af7287e81dae4cf2134fd827614b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7prvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-t9slw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:57Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:57 crc kubenswrapper[4861]: I1003 13:31:57.017621 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:57Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:57 crc kubenswrapper[4861]: I1003 13:31:57.032064 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wm76s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3bf3157b-44d1-4bb3-b185-71523a80c054\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b41a8d9dd0f504f505f1d49f5b7fa39d025f0bc4c49b19ef61691a41bd162b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b41a8d9dd0f504f505f1d49f5b7fa39d025f0bc4c49b19ef61691a41bd162b0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad87b94fe313e12f4f857a69a0a01f6f34877ded4103f2de3015b588522d9904\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad87b94fe313e12f4f857a69a0a01f6f34877ded4103f2de3015b588522d9904\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4343077b8251a65be24e9269fa38bd48d628581ef65b43f7fa262286df7b677d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4343077b8251a65be24e9269fa38bd48d628581ef65b43f7fa262286df7b677d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d10915a62560e9a9e386d4029ccd1c6cd9c99209ab32653437c32a6e5cf7e98d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d10915a62560e9a9e386d4029ccd1c6cd9c99209ab32653437c32a6e5cf7e98d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cd99ac90e9cf617307233b5899d9da44b563bd5a0969e0a64c4073ee0122b63\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cd99ac90e9cf617307233b5899d9da44b563bd5a0969e0a64c4073ee0122b63\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b20088a595b5a59d1a0339827c7dd169c479a530aed875ac3eeb021b78269490\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b20088a595b5a59d1a0339827c7dd169c479a530aed875ac3eeb021b78269490\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wm76s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:57Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:57 crc kubenswrapper[4861]: I1003 13:31:57.042069 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-c97s6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e1217d91-8c47-4353-b363-96c9de2cdb56\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70a4ac69623752a83655a58cf44ef00fbf88b0321bc83721fbbe16ea746699c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6zdw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-c97s6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:57Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:57 crc kubenswrapper[4861]: I1003 13:31:57.053031 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6d0e9cdc-883e-4b67-afb2-2ef5f4b3246d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01eb05b088e421c220145fd833922351aeba4a520944c6b707039785e26ef303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb9bcc84067a58db80e3c7e1b23825baeaff91f97351e9ada3765b6589fda35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80131782150ecbeb45ec2f55e86909b3735ec4f0b09e27e31f6dfc24d6d4ccd7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a40838be9fe69f9bebecff82c9f10b4c00e167b7f927682e6b18ff490bd10ad4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:57Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:57 crc kubenswrapper[4861]: I1003 13:31:57.061939 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-n974h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"25670d98-45f4-4308-9576-f6f532c422ec\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:56Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:56Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2cs7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:56Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-n974h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:57Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:57 crc kubenswrapper[4861]: I1003 13:31:57.063794 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:31:57 crc kubenswrapper[4861]: I1003 13:31:57.063833 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:31:57 crc kubenswrapper[4861]: I1003 13:31:57.063844 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:31:57 crc kubenswrapper[4861]: I1003 13:31:57.063858 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:31:57 crc kubenswrapper[4861]: I1003 13:31:57.063867 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:31:57Z","lastTransitionTime":"2025-10-03T13:31:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:31:57 crc kubenswrapper[4861]: I1003 13:31:57.072454 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"587ecce6-1ef4-4f74-a2ba-bd6e9fdb84dc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d77b51532e1ed4922634cbfc9360ac49276104c2c3ca115ea522ff423cd7bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://853fb69edcd3e4a27929ab2a6081c40f93553967619663805afb7b626f9c1e39\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75079f3e07d277ab11585e34fc72877ba93a8d0aeaa3f0c8bb214c7c14f9c1b1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd18d89b8b81faa6f8232c33ecaec19aafc3d9574a090cbef37bcf26f83a4bf9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cd18d89b8b81faa6f8232c33ecaec19aafc3d9574a090cbef37bcf26f83a4bf9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 13:31:47.746138 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 13:31:47.746280 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 13:31:47.747035 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2119377140/tls.crt::/tmp/serving-cert-2119377140/tls.key\\\\\\\"\\\\nI1003 13:31:48.538827 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 13:31:48.544908 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 13:31:48.544935 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 13:31:48.545220 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 13:31:48.545275 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 13:31:48.555911 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1003 13:31:48.555947 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 13:31:48.555953 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 13:31:48.555961 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 13:31:48.555964 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 13:31:48.555968 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 13:31:48.555971 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1003 13:31:48.556259 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1003 13:31:48.559989 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb1af1cb5b66706cd0a0da5a3f6b2c380a771100e61f84ca2c85c28f1878f7f6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bdf52752b6cec17ac3d33b9c94d08302ff9e7ff88ab9c23590134a36b1384af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bdf52752b6cec17ac3d33b9c94d08302ff9e7ff88ab9c23590134a36b1384af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:57Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:57 crc kubenswrapper[4861]: I1003 13:31:57.083442 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"587ecce6-1ef4-4f74-a2ba-bd6e9fdb84dc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"message\\\":\\\"containers with 
unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d77b51532e1ed4922634cbfc9360ac49276104c2c3ca115ea522ff423cd7bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://853fb69edcd3e4a27929ab2a6081c40f93553967619663805afb7b626f9c1e39\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75079f3e07d277ab11585e34fc72877ba93a8d0aeaa3f0c8bb214c7c14f9c1b1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd18d89b8b81faa6f8232c33ecaec19aafc3d9574a090cbef37bcf26f83a4bf9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cd18d89b8b81faa6f8232c33ecaec19aafc3d9574a090cbef37bcf26f83a4bf9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 13:31:47.746138 1 builder.go:272] unable to get owner 
reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 13:31:47.746280 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 13:31:47.747035 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2119377140/tls.crt::/tmp/serving-cert-2119377140/tls.key\\\\\\\"\\\\nI1003 13:31:48.538827 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 13:31:48.544908 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 13:31:48.544935 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 13:31:48.545220 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 13:31:48.545275 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 13:31:48.555911 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1003 13:31:48.555947 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 13:31:48.555953 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 13:31:48.555961 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 13:31:48.555964 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 13:31:48.555968 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 13:31:48.555971 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1003 13:31:48.556259 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1003 13:31:48.559989 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb1af1cb5b66706cd0a0da5a3f6b2c380a771100e61f84ca2c85c28f1878f7f6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bdf52752b6cec17ac3d33b9c94d08302ff9e7ff88ab9c23590134a36b1384af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bdf52752b6cec17ac3d33b9c94d08302ff9e7ff88ab9c23590134a36b1384af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:57Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:57 crc kubenswrapper[4861]: I1003 13:31:57.093897 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6d0e9cdc-883e-4b67-afb2-2ef5f4b3246d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01eb05b088e421c220145fd833922351aeba4a520944c6b707039785e26ef303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb9bcc84067a58db80e3c7e1b23825baeaff91f97351e9ada3765b6589fda35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80131782150ecbeb45ec2f55e86909b3735ec4f0b09e27e31f6dfc24d6d4ccd7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a40838be9fe69f9bebecff82c9f10b4c00e167b7f927682e6b18ff490bd10ad4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:57Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:57 crc kubenswrapper[4861]: I1003 13:31:57.104536 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-n974h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"25670d98-45f4-4308-9576-f6f532c422ec\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:56Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:56Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2cs7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:56Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-n974h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:57Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:57 crc kubenswrapper[4861]: I1003 13:31:57.118143 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://876e806fe7d7313a700bd557fe86fe469146eeb63ecd75684c558d04f6ef5862\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:57Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:57 crc kubenswrapper[4861]: I1003 13:31:57.151742 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66ed4999-426b-4615-bfb3-764a3ecc950f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a6dd78f8d0e99d19d90df2672bd0a66e48195ab147e3821b110c5b9b13fff935\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d7403a686a403bd13b8c7040a8d54e47ea882e532dbde51ff960cf2b4a7dc84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\
\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dadd381cc9fb8f216611723c7f3113272fdd37e424ab087ae2b516b1282c724\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c11d2168c2a8a146f93a9048c50a0a7da936f36039a924e2e0c946f571ac6d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://17d9e0e142062e93034c5f825e1229664112d38443d5843713cac6e077737c48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a49ed8048fd561e10cc87dfa9b39d3ff2123f2cc65f9b4402bba6
bf01d161213\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89687e56af7bdba3d942add0c731be946a5aa1b19d9cd6db2fd80c30de87c15c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvs
witch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2ee45e3f91ddde95e7bdf26aed6afb1d69eb3dbbdad136c66a51a2a3a325984\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85920cc6705b475966ceb17c3776353ec6cd31730aab339b4e5469034c49264e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://85920cc6705b475966ceb17c3776353ec6cd31730aab339b4e5469034c49264e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5twn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:57Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:57 crc kubenswrapper[4861]: I1003 13:31:57.166588 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:31:57 crc kubenswrapper[4861]: I1003 
13:31:57.166618 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:31:57 crc kubenswrapper[4861]: I1003 13:31:57.166627 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:31:57 crc kubenswrapper[4861]: I1003 13:31:57.166639 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:31:57 crc kubenswrapper[4861]: I1003 13:31:57.166647 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:31:57Z","lastTransitionTime":"2025-10-03T13:31:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:31:57 crc kubenswrapper[4861]: I1003 13:31:57.170314 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:57Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:57 crc kubenswrapper[4861]: I1003 13:31:57.189517 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ceeea9a8c61928b935a6c01f2dda3f9bf0036c2c2792c9338cc580a3296285b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:57Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:57 crc kubenswrapper[4861]: I1003 13:31:57.210584 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:57Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:57 crc kubenswrapper[4861]: I1003 13:31:57.223207 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:57Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:57 crc kubenswrapper[4861]: I1003 13:31:57.234219 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f330284d8d5446236d2bf739c6df75969c865e304c5adab6b1ec40a92baf30d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63b55320840755d0a9c8296d19550ae1d7f5cb2f17d286dddc10a0202963bd98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imag
eID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:57Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:57 crc kubenswrapper[4861]: I1003 13:31:57.244859 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jwgvx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f714b7db-082f-4c2c-8239-ba5df6986c13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://163f5cadc8f9cf8082434639e5dd0dfae5cefc359dbf462b616e4dde476a309f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\"
:\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b76qk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jwgvx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:57Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:57 crc kubenswrapper[4861]: I1003 13:31:57.253166 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d8335d3f-417e-4114-b306-a3d8f6c31348\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c79706d97ac0c9214aee8c49206bfb27e579a82781b63cf07bd7b9dc43077402\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7prvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://871a1c47b73846e3f28db33691e75b5ed73af7287e81dae4cf2134fd827614b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7prvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-t9slw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:57Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:57 crc kubenswrapper[4861]: I1003 13:31:57.267139 4861 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wm76s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3bf3157b-44d1-4bb3-b185-71523a80c054\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b41a8d9dd0f504f505f1d49f5b7fa39d025f0bc4c49b19ef61691a41bd162b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b41a8d9dd0f504f505f1d49f5b7fa39d025f0bc4c49b19ef61691a41bd162b0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad87b94fe313e12f4f857a69a0a01f6f34877ded4103f2de3015b588522d9904\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a168
8df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad87b94fe313e12f4f857a69a0a01f6f34877ded4103f2de3015b588522d9904\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4343077b8251a65be24e9269fa38bd48d628581ef65b43f7fa262286df7b677d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4343077b8251a65be24e9269fa38bd48d628581ef65b43f7fa262286df7b677d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d10915a62560e9a9e386d4029ccd1c6cd9c99209ab32653437c32a6e5cf7e98d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d10915a62560e9a9e386d4029ccd1c6cd9c99209ab32653437c32a6e5cf7e98d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"
/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cd99ac90e9cf617307233b5899d9da44b563bd5a0969e0a64c4073ee0122b63\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cd99ac90e9cf617307233b5899d9da44b563bd5a0969e0a64c4073ee0122b63\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b20088a595b5a59d1a0339827c7dd169c479a530aed875ac3eeb021b78269490\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b20088a595b5a59d1a0339827c7dd169c479a530aed875ac3eeb021b78269490\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wm76s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:57Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:57 crc kubenswrapper[4861]: I1003 13:31:57.268184 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:31:57 crc kubenswrapper[4861]: I1003 13:31:57.268217 4861 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:31:57 crc kubenswrapper[4861]: I1003 13:31:57.268249 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:31:57 crc kubenswrapper[4861]: I1003 13:31:57.268268 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:31:57 crc kubenswrapper[4861]: I1003 13:31:57.268286 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:31:57Z","lastTransitionTime":"2025-10-03T13:31:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:31:57 crc kubenswrapper[4861]: I1003 13:31:57.277468 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-c97s6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e1217d91-8c47-4353-b363-96c9de2cdb56\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70a4ac69623752a83655a58cf44ef00fbf88b0321bc83721fbbe16ea746699c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6zdw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-c97s6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:57Z is after 2025-08-24T17:21:41Z"
Oct 03 13:31:57 crc kubenswrapper[4861]: I1003 13:31:57.370839 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:31:57 crc kubenswrapper[4861]: I1003 13:31:57.370881 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:31:57 crc kubenswrapper[4861]: I1003 13:31:57.370892 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:31:57 crc kubenswrapper[4861]: I1003 13:31:57.370910 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:31:57 crc kubenswrapper[4861]: I1003 13:31:57.370921 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:31:57Z","lastTransitionTime":"2025-10-03T13:31:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:31:57 crc kubenswrapper[4861]: I1003 13:31:57.473502 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:31:57 crc kubenswrapper[4861]: I1003 13:31:57.473573 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:31:57 crc kubenswrapper[4861]: I1003 13:31:57.473589 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:31:57 crc kubenswrapper[4861]: I1003 13:31:57.473615 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:31:57 crc kubenswrapper[4861]: I1003 13:31:57.473630 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:31:57Z","lastTransitionTime":"2025-10-03T13:31:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:31:57 crc kubenswrapper[4861]: I1003 13:31:57.576187 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:31:57 crc kubenswrapper[4861]: I1003 13:31:57.576222 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:31:57 crc kubenswrapper[4861]: I1003 13:31:57.576350 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:31:57 crc kubenswrapper[4861]: I1003 13:31:57.576370 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:31:57 crc kubenswrapper[4861]: I1003 13:31:57.576382 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:31:57Z","lastTransitionTime":"2025-10-03T13:31:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:31:57 crc kubenswrapper[4861]: I1003 13:31:57.678326 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:31:57 crc kubenswrapper[4861]: I1003 13:31:57.678362 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:31:57 crc kubenswrapper[4861]: I1003 13:31:57.678372 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:31:57 crc kubenswrapper[4861]: I1003 13:31:57.678390 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:31:57 crc kubenswrapper[4861]: I1003 13:31:57.678401 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:31:57Z","lastTransitionTime":"2025-10-03T13:31:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:31:57 crc kubenswrapper[4861]: I1003 13:31:57.680883 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 03 13:31:57 crc kubenswrapper[4861]: E1003 13:31:57.681016 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 03 13:31:57 crc kubenswrapper[4861]: I1003 13:31:57.681402 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 03 13:31:57 crc kubenswrapper[4861]: E1003 13:31:57.681522 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 03 13:31:57 crc kubenswrapper[4861]: I1003 13:31:57.681717 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 03 13:31:57 crc kubenswrapper[4861]: E1003 13:31:57.682062 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 03 13:31:57 crc kubenswrapper[4861]: I1003 13:31:57.780656 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:31:57 crc kubenswrapper[4861]: I1003 13:31:57.780695 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:31:57 crc kubenswrapper[4861]: I1003 13:31:57.780707 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:31:57 crc kubenswrapper[4861]: I1003 13:31:57.780724 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:31:57 crc kubenswrapper[4861]: I1003 13:31:57.780735 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:31:57Z","lastTransitionTime":"2025-10-03T13:31:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:31:57 crc kubenswrapper[4861]: I1003 13:31:57.883643 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:31:57 crc kubenswrapper[4861]: I1003 13:31:57.883961 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:31:57 crc kubenswrapper[4861]: I1003 13:31:57.884117 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:31:57 crc kubenswrapper[4861]: I1003 13:31:57.884227 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:31:57 crc kubenswrapper[4861]: I1003 13:31:57.884405 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:31:57Z","lastTransitionTime":"2025-10-03T13:31:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:31:57 crc kubenswrapper[4861]: I1003 13:31:57.899669 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-n974h" event={"ID":"25670d98-45f4-4308-9576-f6f532c422ec","Type":"ContainerStarted","Data":"d9b2d8fb10bb6dc17ca3b4826e1e4b7e8e562e8c8745605cd332268197166b04"}
Oct 03 13:31:57 crc kubenswrapper[4861]: I1003 13:31:57.904365 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-wm76s" event={"ID":"3bf3157b-44d1-4bb3-b185-71523a80c054","Type":"ContainerStarted","Data":"8fea8b7da57798521c13d0f35905e5311cd0d8016aa20c37cc0d73c8d6fbc1a8"}
Oct 03 13:31:57 crc kubenswrapper[4861]: I1003 13:31:57.904525 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-5twn4"
Oct 03 13:31:57 crc kubenswrapper[4861]: I1003 13:31:57.904592 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-5twn4"
Oct 03 13:31:57 crc kubenswrapper[4861]: I1003 13:31:57.915623 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-c97s6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e1217d91-8c47-4353-b363-96c9de2cdb56\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70a4ac69623752a83655a58cf44ef00fbf88b0321bc83721fbbe16ea746699c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6zdw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-c97s6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:57Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:57 crc kubenswrapper[4861]: I1003 13:31:57.924479 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" Oct 03 13:31:57 crc kubenswrapper[4861]: I1003 13:31:57.931194 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wm76s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3bf3157b-44d1-4bb3-b185-71523a80c054\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b41a8d9dd0f504f505f1d49f5b7fa39d025f0bc4c49b19ef61691a41bd162b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b41a8d9dd0f504f505f1d49f5b7fa39d025f0bc4c49b19ef61691a41bd162b0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad87b94fe313e12f4f857a69a0a01f6f34877ded4103f2de3015b588522d9904\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad87b94fe313e12f4f857a69a0a01f6f34877ded4103f2de3015b588522d9904\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4343077b8251a65be24e9269fa38bd48d628581ef65b43f7fa262286df7b677d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4343077b8251a65be24e9269fa38bd48d628581ef65b43f7fa262286df7b677d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d10915a62560e9a9e386d4029ccd1c6cd9c99209ab32653437c32a6e5cf7e98d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d10915a62560e9a9e386d4029ccd1c6cd9c99209ab32653437c32a6e5cf7e98d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"D
isabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cd99ac90e9cf617307233b5899d9da44b563bd5a0969e0a64c4073ee0122b63\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cd99ac90e9cf617307233b5899d9da44b563bd5a0969e0a64c4073ee0122b63\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b20088a595b5a59d1a0339827c7dd169c479a530aed875ac3eeb021b78269490\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b20088a595b5a59d1a0339827c7dd169c479a530aed875ac3eeb021b78269490\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wm76s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:57Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:57 crc kubenswrapper[4861]: I1003 13:31:57.941760 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-n974h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"25670d98-45f4-4308-9576-f6f532c422ec\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d9b2d8fb10bb6dc17ca3b4826e1e4b7e8e562e8c8745605cd332268197166b04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2cs7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:56Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-n974h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:57Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:57 crc kubenswrapper[4861]: I1003 13:31:57.955833 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"587ecce6-1ef4-4f74-a2ba-bd6e9fdb84dc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"message\\\":\\\"containers with 
unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d77b51532e1ed4922634cbfc9360ac49276104c2c3ca115ea522ff423cd7bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://853fb69edcd3e4a27929ab2a6081c40f93553967619663805afb7b626f9c1e39\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75079f3e07d277ab11585e34fc72877ba93a8d0aeaa3f0c8bb214c7c14f9c1b1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd18d89b8b81faa6f8232c33ecaec19aafc3d9574a090cbef37bcf26f83a4bf9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cd18d89b8b81faa6f8232c33ecaec19aafc3d9574a090cbef37bcf26f83a4bf9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 13:31:47.746138 1 builder.go:272] unable to get owner 
reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 13:31:47.746280 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 13:31:47.747035 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2119377140/tls.crt::/tmp/serving-cert-2119377140/tls.key\\\\\\\"\\\\nI1003 13:31:48.538827 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 13:31:48.544908 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 13:31:48.544935 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 13:31:48.545220 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 13:31:48.545275 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 13:31:48.555911 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1003 13:31:48.555947 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 13:31:48.555953 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 13:31:48.555961 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 13:31:48.555964 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 13:31:48.555968 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 13:31:48.555971 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1003 13:31:48.556259 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1003 13:31:48.559989 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb1af1cb5b66706cd0a0da5a3f6b2c380a771100e61f84ca2c85c28f1878f7f6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bdf52752b6cec17ac3d33b9c94d08302ff9e7ff88ab9c23590134a36b1384af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bdf52752b6cec17ac3d33b9c94d08302ff9e7ff88ab9c23590134a36b1384af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:57Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:57 crc kubenswrapper[4861]: I1003 13:31:57.972841 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6d0e9cdc-883e-4b67-afb2-2ef5f4b3246d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01eb05b088e421c220145fd833922351aeba4a520944c6b707039785e26ef303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb9bcc84067a58db80e3c7e1b23825baeaff91f97351e9ada3765b6589fda35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80131782150ecbeb45ec2f55e86909b3735ec4f0b09e27e31f6dfc24d6d4ccd7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a40838be9fe69f9bebecff82c9f10b4c00e167b7f927682e6b18ff490bd10ad4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:57Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:57 crc kubenswrapper[4861]: I1003 13:31:57.985924 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:57Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:57 crc kubenswrapper[4861]: I1003 13:31:57.986729 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:31:57 crc kubenswrapper[4861]: I1003 13:31:57.986779 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:31:57 crc kubenswrapper[4861]: I1003 13:31:57.986787 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:31:57 crc kubenswrapper[4861]: I1003 13:31:57.986803 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:31:57 crc kubenswrapper[4861]: I1003 13:31:57.986815 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:31:57Z","lastTransitionTime":"2025-10-03T13:31:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:31:57 crc kubenswrapper[4861]: I1003 13:31:57.999102 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ceeea9a8c61928b935a6c01f2dda3f9bf0036c2c2792c9338cc580a3296285b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:57Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:58 crc kubenswrapper[4861]: I1003 13:31:58.015501 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:58Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:58 crc kubenswrapper[4861]: I1003 13:31:58.027972 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://876e806fe7d7313a700bd557fe86fe469146eeb63ecd75684c558d04f6ef5862\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:58Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:58 crc kubenswrapper[4861]: I1003 13:31:58.044431 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66ed4999-426b-4615-bfb3-764a3ecc950f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a6dd78f8d0e99d19d90df2672bd0a66e48195ab147e3821b110c5b9b13fff935\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d7403a686a403bd13b8c7040a8d54e47ea882e532dbde51ff960cf2b4a7dc84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-no
de-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dadd381cc9fb8f216611723c7f3113272fdd37e424ab087ae2b516b1282c724\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c11d2168c2a8a146f93a9048c50a0a7da936f36039a924e2e0c946f571ac6d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://17d9e0e142062e93034c5f825e1229664112d38443d5843713cac6e077737c48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\"
:\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a49ed8048fd561e10cc87dfa9b39d3ff2123f2cc65f9b4402bba6bf01d161213\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89687e56af7bdba3d942add0c731be946a5aa1b19d9cd6db2fd80c30de87c15c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch
\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2ee45e3f91ddde95e7bdf26aed6afb1d69eb3dbbdad136c66a51a2a3a325984\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85920cc6705b475966ceb17c3776353ec6cd31730aab339b4e5469034c49264e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://85920cc6705b475966ceb17c3776353ec6cd31730aab339b4e5469034c49264e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5twn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:58Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:58 crc kubenswrapper[4861]: I1003 13:31:58.058874 4861 status_manager.go:875] "Failed to update status for 
pod" pod="openshift-multus/multus-jwgvx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f714b7db-082f-4c2c-8239-ba5df6986c13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://163f5cadc8f9cf8082434639e5dd0dfae5cefc359dbf462b616e4dde476a309f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b76qk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-jwgvx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:58Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:58 crc kubenswrapper[4861]: I1003 13:31:58.069364 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d8335d3f-417e-4114-b306-a3d8f6c31348\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c79706d97ac0c9214aee8c49206bfb27e579a82781b63cf07bd7b9dc43077402\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7prvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://871a1c47b73846e3f28db33691e75b5ed73af7287e81dae4cf2134fd827614b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7prvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":
\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-t9slw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:58Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:58 crc kubenswrapper[4861]: I1003 13:31:58.083940 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:58Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:58 crc kubenswrapper[4861]: I1003 13:31:58.089268 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:31:58 crc kubenswrapper[4861]: I1003 13:31:58.089291 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:31:58 crc kubenswrapper[4861]: I1003 13:31:58.089299 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:31:58 crc kubenswrapper[4861]: I1003 13:31:58.089314 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:31:58 crc kubenswrapper[4861]: I1003 13:31:58.089323 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:31:58Z","lastTransitionTime":"2025-10-03T13:31:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:31:58 crc kubenswrapper[4861]: I1003 13:31:58.096932 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f330284d8d5446236d2bf739c6df75969c865e304c5adab6b1ec40a92baf30d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63b55320840755d0a9c8296d19550ae1d7f5cb2f17d286dddc10a0202963bd98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:58Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:58 crc kubenswrapper[4861]: I1003 13:31:58.110089 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"587ecce6-1ef4-4f74-a2ba-bd6e9fdb84dc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d77b51532e1ed4922634cbfc9360ac49276104c2c3ca115ea522ff423cd7bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://853fb69edcd3e4a27929ab2a6081c40f93553967619663805afb7b626f9c1e39\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75079f3e07d277ab11585e34fc72877ba93a8d0aeaa3f0c8bb214c7c14f9c1b1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd18d89b8b81faa6f8232c33ecaec19aafc3d9574a090cbef37bcf26f83a4bf9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cd18d89b8b81faa6f8232c33ecaec19aafc3d9574a090cbef37bcf26f83a4bf9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 13:31:47.746138 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 13:31:47.746280 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 13:31:47.747035 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2119377140/tls.crt::/tmp/serving-cert-2119377140/tls.key\\\\\\\"\\\\nI1003 13:31:48.538827 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 13:31:48.544908 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 13:31:48.544935 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 13:31:48.545220 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 13:31:48.545275 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 13:31:48.555911 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1003 13:31:48.555947 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 13:31:48.555953 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 13:31:48.555961 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 13:31:48.555964 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 13:31:48.555968 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 13:31:48.555971 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1003 13:31:48.556259 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1003 13:31:48.559989 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb1af1cb5b66706cd0a0da5a3f6b2c380a771100e61f84ca2c85c28f1878f7f6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bdf52752b6cec17ac3d33b9c94d08302ff9e7ff88ab9c23590134a36b1384af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bdf52752b6cec17ac3d33b9c94d08302ff9e7ff88ab9c23590134a36b1384af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:58Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:58 crc kubenswrapper[4861]: I1003 13:31:58.123545 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6d0e9cdc-883e-4b67-afb2-2ef5f4b3246d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01eb05b088e421c220145fd833922351aeba4a520944c6b707039785e26ef303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb9bcc84067a58db80e3c7e1b23825baeaff91f97351e9ada3765b6589fda35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80131782150ecbeb45ec2f55e86909b3735ec4f0b09e27e31f6dfc24d6d4ccd7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a40838be9fe69f9bebecff82c9f10b4c00e167b7f927682e6b18ff490bd10ad4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:58Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:58 crc kubenswrapper[4861]: I1003 13:31:58.135458 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-n974h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"25670d98-45f4-4308-9576-f6f532c422ec\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d9b2d8fb10bb6dc17ca3b4826e1e4b7e8e562e8c8745605cd332268197166b04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2cs7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase
\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:56Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-n974h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:58Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:58 crc kubenswrapper[4861]: I1003 13:31:58.148295 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://876e806fe7d7313a700bd557fe86fe469146eeb63ecd75684c558d04f6ef5862\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:58Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:58 crc kubenswrapper[4861]: I1003 13:31:58.165330 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"66ed4999-426b-4615-bfb3-764a3ecc950f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a6dd78f8d0e99d19d90df2672bd0a66e48195ab147e3821b110c5b9b13fff935\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d7403a686a403bd13b8c7040a8d54e47ea882e532dbde51ff960cf2b4a7dc84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dadd381cc9fb8f216611723c7f3113272fdd37e424ab087ae2b516b1282c724\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c11d2168c2a8a146f93a9048c50a0a7da936f36039a924e2e0c946f571ac6d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://17d9e0e142062e93034c5f825e1229664112d38443d5843713cac6e077737c48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a49ed8048fd561e10cc87dfa9b39d3ff2123f2cc65f9b4402bba6bf01d161213\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89687e56af7bdba3d942add0c731be946a5aa1b19d9cd6db2fd80c30de87c15c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"D
isabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2ee45e3f91ddde95e7bdf26aed6afb1d69eb3dbbdad136c66a51a2a3a325984\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85920cc6705b475966ceb17c3776353ec6cd31730aab339b4e5469034c49264e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://85920cc6705b475966ceb17c3776353ec6cd31730aab339b4e5469034c49264e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5twn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:58Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:58 crc kubenswrapper[4861]: I1003 13:31:58.178814 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:58Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:58 crc kubenswrapper[4861]: I1003 13:31:58.191812 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:31:58 crc kubenswrapper[4861]: I1003 13:31:58.191849 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:31:58 crc kubenswrapper[4861]: I1003 13:31:58.191858 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:31:58 crc kubenswrapper[4861]: I1003 13:31:58.191873 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:31:58 crc kubenswrapper[4861]: I1003 13:31:58.191881 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:31:58Z","lastTransitionTime":"2025-10-03T13:31:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:31:58 crc kubenswrapper[4861]: I1003 13:31:58.193002 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ceeea9a8c61928b935a6c01f2dda3f9bf0036c2c2792c9338cc580a3296285b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:58Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:58 crc kubenswrapper[4861]: I1003 13:31:58.206193 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:58Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:58 crc kubenswrapper[4861]: I1003 13:31:58.217948 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:58Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:58 crc kubenswrapper[4861]: I1003 13:31:58.230242 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f330284d8d5446236d2bf739c6df75969c865e304c5adab6b1ec40a92baf30d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63b55320840755d0a9c8296d19550ae1d7f5cb2f17d286dddc10a0202963bd98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:58Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:58 crc kubenswrapper[4861]: I1003 13:31:58.245704 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jwgvx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f714b7db-082f-4c2c-8239-ba5df6986c13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://163f5cadc8f9cf8082434639e5dd0dfae5cefc359dbf462b616e4dde476a309f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc
/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b76qk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jwgvx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:58Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:58 crc kubenswrapper[4861]: I1003 13:31:58.257664 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d8335d3f-417e-4114-b306-a3d8f6c31348\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c79706d97ac0c9214aee8c49206bfb27e579a82781b63cf07bd7b9dc43077402\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7prvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://871a1c47b73846e3f28db33691e75b5ed73af7287e81dae4cf2134fd827614b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-de
v/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7prvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-t9slw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:58Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:58 crc kubenswrapper[4861]: I1003 13:31:58.273934 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wm76s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3bf3157b-44d1-4bb3-b185-71523a80c054\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8fea8b7da57798521c13d0f35905e5311cd0d8016aa20c37cc0d73c8d6fbc1a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b41a8d9dd0f504f505f1d49f5b7fa39d025f0bc4c49b19ef61691a41bd162b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea
2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b41a8d9dd0f504f505f1d49f5b7fa39d025f0bc4c49b19ef61691a41bd162b0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad87b94fe313e12f4f857a69a0a01f6f34877ded4103f2de3015b588522d9904\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad87b94fe313e12f4f857a69a0a01f6f34877ded4103f2de3015b588522d9904\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4343077b8251a65be24e9269fa38bd48d628581ef65b43f7fa262286df7b677d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4343077b8251a65be24e9269fa38bd48d628581ef65b43f7fa262286df7b677d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/o
s-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d10915a62560e9a9e386d4029ccd1c6cd9c99209ab32653437c32a6e5cf7e98d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d10915a62560e9a9e386d4029ccd1c6cd9c99209ab32653437c32a6e5cf7e98d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cd99ac90e9cf617307233b5899d9da44b563bd5a0969e0a64c4073ee0122b63\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cd99ac90e9cf617307233b5899d9da44b563bd5a0969e0a64c4073ee0122b63\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b20088a595b5a59d1a0339827c7dd169c479a530aed875ac3eeb021b78269490\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b20088a595b5a59d1a0339827c7dd169c479a530aed875ac3eeb021b78269490\\\",\\\"exitCode\
\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wm76s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:58Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:58 crc kubenswrapper[4861]: I1003 13:31:58.288564 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-c97s6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e1217d91-8c47-4353-b363-96c9de2cdb56\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70a4ac69623752a83655a58cf44ef00fbf88b0321bc83721fbbe16ea746699c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6zdw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-c97s6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:31:58Z is after 2025-08-24T17:21:41Z" Oct 03 13:31:58 crc kubenswrapper[4861]: I1003 13:31:58.294035 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:31:58 crc kubenswrapper[4861]: I1003 13:31:58.294080 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:31:58 crc kubenswrapper[4861]: I1003 13:31:58.294090 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:31:58 crc kubenswrapper[4861]: I1003 13:31:58.294105 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:31:58 crc kubenswrapper[4861]: I1003 13:31:58.294116 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:31:58Z","lastTransitionTime":"2025-10-03T13:31:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:31:58 crc kubenswrapper[4861]: I1003 13:31:58.396669 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:31:58 crc kubenswrapper[4861]: I1003 13:31:58.396717 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:31:58 crc kubenswrapper[4861]: I1003 13:31:58.396727 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:31:58 crc kubenswrapper[4861]: I1003 13:31:58.396745 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:31:58 crc kubenswrapper[4861]: I1003 13:31:58.396758 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:31:58Z","lastTransitionTime":"2025-10-03T13:31:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:31:58 crc kubenswrapper[4861]: I1003 13:31:58.498959 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:31:58 crc kubenswrapper[4861]: I1003 13:31:58.499010 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:31:58 crc kubenswrapper[4861]: I1003 13:31:58.499020 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:31:58 crc kubenswrapper[4861]: I1003 13:31:58.499038 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:31:58 crc kubenswrapper[4861]: I1003 13:31:58.499049 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:31:58Z","lastTransitionTime":"2025-10-03T13:31:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:31:58 crc kubenswrapper[4861]: I1003 13:31:58.601218 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:31:58 crc kubenswrapper[4861]: I1003 13:31:58.601296 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:31:58 crc kubenswrapper[4861]: I1003 13:31:58.601309 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:31:58 crc kubenswrapper[4861]: I1003 13:31:58.601327 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:31:58 crc kubenswrapper[4861]: I1003 13:31:58.601339 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:31:58Z","lastTransitionTime":"2025-10-03T13:31:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:31:58 crc kubenswrapper[4861]: I1003 13:31:58.703556 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:31:58 crc kubenswrapper[4861]: I1003 13:31:58.703594 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:31:58 crc kubenswrapper[4861]: I1003 13:31:58.703605 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:31:58 crc kubenswrapper[4861]: I1003 13:31:58.703624 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:31:58 crc kubenswrapper[4861]: I1003 13:31:58.703635 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:31:58Z","lastTransitionTime":"2025-10-03T13:31:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:31:58 crc kubenswrapper[4861]: I1003 13:31:58.805460 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:31:58 crc kubenswrapper[4861]: I1003 13:31:58.805487 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:31:58 crc kubenswrapper[4861]: I1003 13:31:58.805495 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:31:58 crc kubenswrapper[4861]: I1003 13:31:58.805509 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:31:58 crc kubenswrapper[4861]: I1003 13:31:58.805518 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:31:58Z","lastTransitionTime":"2025-10-03T13:31:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:31:58 crc kubenswrapper[4861]: I1003 13:31:58.911013 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:31:58 crc kubenswrapper[4861]: I1003 13:31:58.911079 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:31:58 crc kubenswrapper[4861]: I1003 13:31:58.911096 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:31:58 crc kubenswrapper[4861]: I1003 13:31:58.911117 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:31:58 crc kubenswrapper[4861]: I1003 13:31:58.911129 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:31:58Z","lastTransitionTime":"2025-10-03T13:31:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:31:59 crc kubenswrapper[4861]: I1003 13:31:59.013676 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:31:59 crc kubenswrapper[4861]: I1003 13:31:59.013716 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:31:59 crc kubenswrapper[4861]: I1003 13:31:59.013727 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:31:59 crc kubenswrapper[4861]: I1003 13:31:59.013740 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:31:59 crc kubenswrapper[4861]: I1003 13:31:59.013750 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:31:59Z","lastTransitionTime":"2025-10-03T13:31:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:31:59 crc kubenswrapper[4861]: I1003 13:31:59.116267 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:31:59 crc kubenswrapper[4861]: I1003 13:31:59.116304 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:31:59 crc kubenswrapper[4861]: I1003 13:31:59.116314 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:31:59 crc kubenswrapper[4861]: I1003 13:31:59.116329 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:31:59 crc kubenswrapper[4861]: I1003 13:31:59.116339 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:31:59Z","lastTransitionTime":"2025-10-03T13:31:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:31:59 crc kubenswrapper[4861]: I1003 13:31:59.217907 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:31:59 crc kubenswrapper[4861]: I1003 13:31:59.217953 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:31:59 crc kubenswrapper[4861]: I1003 13:31:59.217971 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:31:59 crc kubenswrapper[4861]: I1003 13:31:59.217988 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:31:59 crc kubenswrapper[4861]: I1003 13:31:59.217999 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:31:59Z","lastTransitionTime":"2025-10-03T13:31:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:31:59 crc kubenswrapper[4861]: I1003 13:31:59.320007 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:31:59 crc kubenswrapper[4861]: I1003 13:31:59.320333 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:31:59 crc kubenswrapper[4861]: I1003 13:31:59.320434 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:31:59 crc kubenswrapper[4861]: I1003 13:31:59.320521 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:31:59 crc kubenswrapper[4861]: I1003 13:31:59.320612 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:31:59Z","lastTransitionTime":"2025-10-03T13:31:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:31:59 crc kubenswrapper[4861]: I1003 13:31:59.423557 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:31:59 crc kubenswrapper[4861]: I1003 13:31:59.423627 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:31:59 crc kubenswrapper[4861]: I1003 13:31:59.423640 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:31:59 crc kubenswrapper[4861]: I1003 13:31:59.423665 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:31:59 crc kubenswrapper[4861]: I1003 13:31:59.423974 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:31:59Z","lastTransitionTime":"2025-10-03T13:31:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:31:59 crc kubenswrapper[4861]: I1003 13:31:59.526849 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:31:59 crc kubenswrapper[4861]: I1003 13:31:59.526882 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:31:59 crc kubenswrapper[4861]: I1003 13:31:59.526891 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:31:59 crc kubenswrapper[4861]: I1003 13:31:59.526905 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:31:59 crc kubenswrapper[4861]: I1003 13:31:59.526913 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:31:59Z","lastTransitionTime":"2025-10-03T13:31:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:31:59 crc kubenswrapper[4861]: I1003 13:31:59.629106 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:31:59 crc kubenswrapper[4861]: I1003 13:31:59.629134 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:31:59 crc kubenswrapper[4861]: I1003 13:31:59.629142 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:31:59 crc kubenswrapper[4861]: I1003 13:31:59.629155 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:31:59 crc kubenswrapper[4861]: I1003 13:31:59.629164 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:31:59Z","lastTransitionTime":"2025-10-03T13:31:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:31:59 crc kubenswrapper[4861]: I1003 13:31:59.680894 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 13:31:59 crc kubenswrapper[4861]: I1003 13:31:59.680935 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 13:31:59 crc kubenswrapper[4861]: I1003 13:31:59.680985 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 13:31:59 crc kubenswrapper[4861]: E1003 13:31:59.681017 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 13:31:59 crc kubenswrapper[4861]: E1003 13:31:59.681102 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 13:31:59 crc kubenswrapper[4861]: E1003 13:31:59.681166 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 13:31:59 crc kubenswrapper[4861]: I1003 13:31:59.731528 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:31:59 crc kubenswrapper[4861]: I1003 13:31:59.731781 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:31:59 crc kubenswrapper[4861]: I1003 13:31:59.731866 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:31:59 crc kubenswrapper[4861]: I1003 13:31:59.731961 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:31:59 crc kubenswrapper[4861]: I1003 13:31:59.732048 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:31:59Z","lastTransitionTime":"2025-10-03T13:31:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:31:59 crc kubenswrapper[4861]: I1003 13:31:59.834323 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:31:59 crc kubenswrapper[4861]: I1003 13:31:59.834358 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:31:59 crc kubenswrapper[4861]: I1003 13:31:59.834368 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:31:59 crc kubenswrapper[4861]: I1003 13:31:59.834384 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:31:59 crc kubenswrapper[4861]: I1003 13:31:59.834394 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:31:59Z","lastTransitionTime":"2025-10-03T13:31:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:31:59 crc kubenswrapper[4861]: I1003 13:31:59.937346 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:31:59 crc kubenswrapper[4861]: I1003 13:31:59.937376 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:31:59 crc kubenswrapper[4861]: I1003 13:31:59.937385 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:31:59 crc kubenswrapper[4861]: I1003 13:31:59.937398 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:31:59 crc kubenswrapper[4861]: I1003 13:31:59.937406 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:31:59Z","lastTransitionTime":"2025-10-03T13:31:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:00 crc kubenswrapper[4861]: I1003 13:32:00.039675 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:00 crc kubenswrapper[4861]: I1003 13:32:00.039723 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:00 crc kubenswrapper[4861]: I1003 13:32:00.039739 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:00 crc kubenswrapper[4861]: I1003 13:32:00.039757 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:00 crc kubenswrapper[4861]: I1003 13:32:00.039768 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:00Z","lastTransitionTime":"2025-10-03T13:32:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:00 crc kubenswrapper[4861]: I1003 13:32:00.145721 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:00 crc kubenswrapper[4861]: I1003 13:32:00.145772 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:00 crc kubenswrapper[4861]: I1003 13:32:00.145785 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:00 crc kubenswrapper[4861]: I1003 13:32:00.145804 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:00 crc kubenswrapper[4861]: I1003 13:32:00.145816 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:00Z","lastTransitionTime":"2025-10-03T13:32:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:00 crc kubenswrapper[4861]: I1003 13:32:00.247828 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:00 crc kubenswrapper[4861]: I1003 13:32:00.247863 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:00 crc kubenswrapper[4861]: I1003 13:32:00.247872 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:00 crc kubenswrapper[4861]: I1003 13:32:00.247885 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:00 crc kubenswrapper[4861]: I1003 13:32:00.247893 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:00Z","lastTransitionTime":"2025-10-03T13:32:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:00 crc kubenswrapper[4861]: I1003 13:32:00.350039 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:00 crc kubenswrapper[4861]: I1003 13:32:00.350071 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:00 crc kubenswrapper[4861]: I1003 13:32:00.350083 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:00 crc kubenswrapper[4861]: I1003 13:32:00.350098 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:00 crc kubenswrapper[4861]: I1003 13:32:00.350108 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:00Z","lastTransitionTime":"2025-10-03T13:32:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:00 crc kubenswrapper[4861]: I1003 13:32:00.452304 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:00 crc kubenswrapper[4861]: I1003 13:32:00.452346 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:00 crc kubenswrapper[4861]: I1003 13:32:00.452355 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:00 crc kubenswrapper[4861]: I1003 13:32:00.452369 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:00 crc kubenswrapper[4861]: I1003 13:32:00.452379 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:00Z","lastTransitionTime":"2025-10-03T13:32:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:00 crc kubenswrapper[4861]: I1003 13:32:00.554756 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:00 crc kubenswrapper[4861]: I1003 13:32:00.554806 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:00 crc kubenswrapper[4861]: I1003 13:32:00.554819 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:00 crc kubenswrapper[4861]: I1003 13:32:00.554839 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:00 crc kubenswrapper[4861]: I1003 13:32:00.554852 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:00Z","lastTransitionTime":"2025-10-03T13:32:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:00 crc kubenswrapper[4861]: I1003 13:32:00.657266 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:00 crc kubenswrapper[4861]: I1003 13:32:00.657302 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:00 crc kubenswrapper[4861]: I1003 13:32:00.657314 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:00 crc kubenswrapper[4861]: I1003 13:32:00.657328 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:00 crc kubenswrapper[4861]: I1003 13:32:00.657339 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:00Z","lastTransitionTime":"2025-10-03T13:32:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:00 crc kubenswrapper[4861]: I1003 13:32:00.759914 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:00 crc kubenswrapper[4861]: I1003 13:32:00.759956 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:00 crc kubenswrapper[4861]: I1003 13:32:00.759969 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:00 crc kubenswrapper[4861]: I1003 13:32:00.760025 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:00 crc kubenswrapper[4861]: I1003 13:32:00.760040 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:00Z","lastTransitionTime":"2025-10-03T13:32:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:00 crc kubenswrapper[4861]: I1003 13:32:00.862674 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:00 crc kubenswrapper[4861]: I1003 13:32:00.862733 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:00 crc kubenswrapper[4861]: I1003 13:32:00.862743 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:00 crc kubenswrapper[4861]: I1003 13:32:00.862760 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:00 crc kubenswrapper[4861]: I1003 13:32:00.862771 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:00Z","lastTransitionTime":"2025-10-03T13:32:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:00 crc kubenswrapper[4861]: I1003 13:32:00.914420 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5twn4_66ed4999-426b-4615-bfb3-764a3ecc950f/ovnkube-controller/0.log" Oct 03 13:32:00 crc kubenswrapper[4861]: I1003 13:32:00.917558 4861 generic.go:334] "Generic (PLEG): container finished" podID="66ed4999-426b-4615-bfb3-764a3ecc950f" containerID="89687e56af7bdba3d942add0c731be946a5aa1b19d9cd6db2fd80c30de87c15c" exitCode=1 Oct 03 13:32:00 crc kubenswrapper[4861]: I1003 13:32:00.917591 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" event={"ID":"66ed4999-426b-4615-bfb3-764a3ecc950f","Type":"ContainerDied","Data":"89687e56af7bdba3d942add0c731be946a5aa1b19d9cd6db2fd80c30de87c15c"} Oct 03 13:32:00 crc kubenswrapper[4861]: I1003 13:32:00.918173 4861 scope.go:117] "RemoveContainer" containerID="89687e56af7bdba3d942add0c731be946a5aa1b19d9cd6db2fd80c30de87c15c" Oct 03 13:32:00 crc kubenswrapper[4861]: I1003 13:32:00.933452 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wm76s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3bf3157b-44d1-4bb3-b185-71523a80c054\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8fea8b7da57798521c13d0f35905e5311cd0d8016aa20c37cc0d73c8d6fbc1a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b41a8d9dd0f504f505f1d49f5b7fa39d025f0bc4c49b19ef61691a41bd162b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\
":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b41a8d9dd0f504f505f1d49f5b7fa39d025f0bc4c49b19ef61691a41bd162b0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad87b94fe313e12f4f857a69a0a01f6f34877ded4103f2de3015b588522d9904\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad87b94fe313e12f4f857a69a0a01f6f34877ded4103f2de3015b588522d9904\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4343077b8251a65be24e9269fa38bd48d628581ef65b43f7fa262286df7b677d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4343077b8251a65be24e9269fa38bd48d628581ef65b43f7fa262286df7b677d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-ap
i-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d10915a62560e9a9e386d4029ccd1c6cd9c99209ab32653437c32a6e5cf7e98d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d10915a62560e9a9e386d4029ccd1c6cd9c99209ab32653437c32a6e5cf7e98d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cd99ac90e9cf617307233b5899d9da44b563bd5a0969e0a64c4073ee0122b63\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cd99ac90e9cf617307233b5899d9da44b563bd5a0969e0a64c4073ee0122b63\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b20088a595b5a59d1a0339827c7dd169c479a530aed875ac3eeb021b78269490\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b20088a595b5a59d1a0339827c7dd169c479a530aed875ac3eeb021b78269490\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"na
me\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wm76s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:00Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:00 crc kubenswrapper[4861]: I1003 13:32:00.945734 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-c97s6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e1217d91-8c47-4353-b363-96c9de2cdb56\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70a4ac69623752a83655a58cf44ef00fbf88b0321bc83721fbbe16ea746699c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6zdw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-c97s6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:00Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:00 crc kubenswrapper[4861]: I1003 
13:32:00.961993 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6d0e9cdc-883e-4b67-afb2-2ef5f4b3246d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01eb05b088e421c220145fd833922351aeba4a520944c6b707039785e26ef303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb9bcc84067a58db80e3c7e1b23825baeaff91f97351e9ada3765b6589fda35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80131782150ecbeb45ec2f55e86909b3735ec4f0b09e27e31f6dfc24d6d4ccd7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"
containerID\\\":\\\"cri-o://a40838be9fe69f9bebecff82c9f10b4c00e167b7f927682e6b18ff490bd10ad4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:00Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:00 crc kubenswrapper[4861]: I1003 13:32:00.965355 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:00 crc kubenswrapper[4861]: I1003 13:32:00.965410 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:00 crc kubenswrapper[4861]: I1003 13:32:00.965420 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:00 crc kubenswrapper[4861]: I1003 13:32:00.965435 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:00 crc kubenswrapper[4861]: I1003 13:32:00.965447 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:00Z","lastTransitionTime":"2025-10-03T13:32:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:00 crc kubenswrapper[4861]: I1003 13:32:00.974916 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-n974h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"25670d98-45f4-4308-9576-f6f532c422ec\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d9b2d8fb10bb6dc17ca3b4826e1e4b7e8e562e8c8745605cd332268197166b04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2cs7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:56Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-n974h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:00Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:00 crc kubenswrapper[4861]: I1003 13:32:00.988065 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"587ecce6-1ef4-4f74-a2ba-bd6e9fdb84dc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d77b51532e1ed4922634cbfc9360ac49276104c2c3ca115ea522ff423cd7bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://853fb69edcd3e4a27929ab2a6081c40f93553967619663805afb7b626f9c1e39\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75079f3e07d277ab11585e34fc72877ba93a8d0aeaa3f0c8bb214c7c14f9c1b1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd18d89b8b81faa6f8232c33ecaec19aafc3d9574a090cbef37bcf26f83a4bf9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://cd18d89b8b81faa6f8232c33ecaec19aafc3d9574a090cbef37bcf26f83a4bf9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 13:31:47.746138 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 13:31:47.746280 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 13:31:47.747035 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2119377140/tls.crt::/tmp/serving-cert-2119377140/tls.key\\\\\\\"\\\\nI1003 13:31:48.538827 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 13:31:48.544908 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 13:31:48.544935 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 13:31:48.545220 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 13:31:48.545275 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 13:31:48.555911 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1003 13:31:48.555947 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 13:31:48.555953 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 13:31:48.555961 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 13:31:48.555964 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 13:31:48.555968 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 13:31:48.555971 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1003 13:31:48.556259 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1003 13:31:48.559989 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb1af1cb5b66706cd0a0da5a3f6b2c380a771100e61f84ca2c85c28f1878f7f6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bdf52752b6cec17ac3d33b9c94d08302ff9e7ff88ab9c23590134a36b1384af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bdf52752b6cec17ac3d33b9c94d08302ff9e7ff88ab9c23590134a36b1384af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:00Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:01 crc kubenswrapper[4861]: I1003 13:32:01.002265 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:00Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:01 crc kubenswrapper[4861]: I1003 13:32:01.015589 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ceeea9a8c61928b935a6c01f2dda3f9bf0036c2c2792c9338cc580a3296285b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:01Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:01 crc kubenswrapper[4861]: I1003 13:32:01.030223 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:01Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:01 crc kubenswrapper[4861]: I1003 13:32:01.043358 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://876e806fe7d7313a700bd557fe86fe469146eeb63ecd75684c558d04f6ef5862\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:01Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:01 crc kubenswrapper[4861]: I1003 13:32:01.062149 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66ed4999-426b-4615-bfb3-764a3ecc950f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a6dd78f8d0e99d19d90df2672bd0a66e48195ab147e3821b110c5b9b13fff935\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d7403a686a403bd13b8c7040a8d54e47ea882e532dbde51ff960cf2b4a7dc84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dadd381cc9fb8f216611723c7f3113272fdd37e424ab087ae2b516b1282c724\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c11d2168c2a8a146f93a9048c50a0a7da936f36039a924e2e0c946f571ac6d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://17d9e0e142062e93034c5f825e1229664112d38443d5843713cac6e077737c48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a49ed8048fd561e10cc87dfa9b39d3ff2123f2cc65f9b4402bba6bf01d161213\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89687e56af7bdba3d942add0c731be946a5aa1b1
9d9cd6db2fd80c30de87c15c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://89687e56af7bdba3d942add0c731be946a5aa1b19d9cd6db2fd80c30de87c15c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T13:32:00Z\\\",\\\"message\\\":\\\"pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI1003 13:32:00.271059 6031 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1003 13:32:00.271086 6031 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1003 13:32:00.271106 6031 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1003 13:32:00.271131 6031 factory.go:656] Stopping watch factory\\\\nI1003 13:32:00.271145 6031 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1003 13:32:00.271065 6031 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1003 13:32:00.271199 6031 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1003 13:32:00.271291 6031 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1003 13:32:00.271622 6031 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from 
github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2ee45e3f91ddde95e7bdf26aed6afb1d69eb3dbbdad136c66a51a2a3a325984\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85920cc6705b475966ceb17c3776353ec6cd31730aab339b4e5469034c4
9264e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://85920cc6705b475966ceb17c3776353ec6cd31730aab339b4e5469034c49264e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5twn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:01Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:01 crc kubenswrapper[4861]: I1003 13:32:01.071654 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:01 crc kubenswrapper[4861]: I1003 13:32:01.071686 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:01 crc kubenswrapper[4861]: I1003 13:32:01.071697 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:01 crc kubenswrapper[4861]: I1003 13:32:01.071711 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:01 crc kubenswrapper[4861]: I1003 13:32:01.071720 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:01Z","lastTransitionTime":"2025-10-03T13:32:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:01 crc kubenswrapper[4861]: I1003 13:32:01.075795 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f330284d8d5446236d2bf739c6df75969c865e304c5adab6b1ec40a92baf30d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63b55320840755d0a9c8296d19550ae1d7f5cb2f17d286dddc10a0202963bd98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:01Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:01 crc kubenswrapper[4861]: I1003 13:32:01.088856 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jwgvx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f714b7db-082f-4c2c-8239-ba5df6986c13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://163f5cadc8f9cf8082434639e5dd0dfae5cefc359dbf462b616e4dde476a309f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b76qk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jwgvx\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:01Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:01 crc kubenswrapper[4861]: I1003 13:32:01.100658 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d8335d3f-417e-4114-b306-a3d8f6c31348\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c79706d97ac0c9214aee8c49206bfb27e579a82781b63cf07bd7b9dc43077402\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7prvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://871a1c47b73846e3f28db33691e75b5ed73af7287e81dae4cf2134fd827614b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7prvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-t9slw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:01Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:01 crc kubenswrapper[4861]: I1003 13:32:01.116462 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:01Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:01 crc kubenswrapper[4861]: I1003 13:32:01.173757 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:01 crc kubenswrapper[4861]: I1003 13:32:01.173792 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:01 crc kubenswrapper[4861]: I1003 13:32:01.173801 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:01 crc kubenswrapper[4861]: I1003 13:32:01.173815 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:01 crc kubenswrapper[4861]: I1003 13:32:01.173827 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:01Z","lastTransitionTime":"2025-10-03T13:32:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:01 crc kubenswrapper[4861]: I1003 13:32:01.325492 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:01 crc kubenswrapper[4861]: I1003 13:32:01.325533 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:01 crc kubenswrapper[4861]: I1003 13:32:01.325545 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:01 crc kubenswrapper[4861]: I1003 13:32:01.325559 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:01 crc kubenswrapper[4861]: I1003 13:32:01.325569 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:01Z","lastTransitionTime":"2025-10-03T13:32:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:01 crc kubenswrapper[4861]: I1003 13:32:01.427606 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:01 crc kubenswrapper[4861]: I1003 13:32:01.427649 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:01 crc kubenswrapper[4861]: I1003 13:32:01.427659 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:01 crc kubenswrapper[4861]: I1003 13:32:01.427674 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:01 crc kubenswrapper[4861]: I1003 13:32:01.427684 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:01Z","lastTransitionTime":"2025-10-03T13:32:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:01 crc kubenswrapper[4861]: I1003 13:32:01.529819 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:01 crc kubenswrapper[4861]: I1003 13:32:01.529850 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:01 crc kubenswrapper[4861]: I1003 13:32:01.529861 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:01 crc kubenswrapper[4861]: I1003 13:32:01.529877 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:01 crc kubenswrapper[4861]: I1003 13:32:01.529894 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:01Z","lastTransitionTime":"2025-10-03T13:32:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:01 crc kubenswrapper[4861]: I1003 13:32:01.631616 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:01 crc kubenswrapper[4861]: I1003 13:32:01.631647 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:01 crc kubenswrapper[4861]: I1003 13:32:01.631655 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:01 crc kubenswrapper[4861]: I1003 13:32:01.631669 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:01 crc kubenswrapper[4861]: I1003 13:32:01.631677 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:01Z","lastTransitionTime":"2025-10-03T13:32:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:01 crc kubenswrapper[4861]: I1003 13:32:01.680427 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 13:32:01 crc kubenswrapper[4861]: I1003 13:32:01.680506 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 13:32:01 crc kubenswrapper[4861]: I1003 13:32:01.680427 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 13:32:01 crc kubenswrapper[4861]: E1003 13:32:01.680559 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 13:32:01 crc kubenswrapper[4861]: E1003 13:32:01.680665 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 13:32:01 crc kubenswrapper[4861]: E1003 13:32:01.680752 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 13:32:01 crc kubenswrapper[4861]: I1003 13:32:01.709135 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hw4vl"] Oct 03 13:32:01 crc kubenswrapper[4861]: I1003 13:32:01.709618 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hw4vl" Oct 03 13:32:01 crc kubenswrapper[4861]: I1003 13:32:01.712147 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Oct 03 13:32:01 crc kubenswrapper[4861]: I1003 13:32:01.712319 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Oct 03 13:32:01 crc kubenswrapper[4861]: I1003 13:32:01.729339 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"587ecce6-1ef4-4f74-a2ba-bd6e9fdb84dc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d77b51532e1ed4922634cbfc9360ac49276104c2c3ca115ea522ff423cd7bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://853fb69edcd3e4a27929ab2a6081c40f93553967619663805afb7b626f9c1e39\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75079f3e07d277ab11585e34fc728
77ba93a8d0aeaa3f0c8bb214c7c14f9c1b1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd18d89b8b81faa6f8232c33ecaec19aafc3d9574a090cbef37bcf26f83a4bf9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cd18d89b8b81faa6f8232c33ecaec19aafc3d9574a090cbef37bcf26f83a4bf9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 13:31:47.746138 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 13:31:47.746280 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 13:31:47.747035 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2119377140/tls.crt::/tmp/serving-cert-2119377140/tls.key\\\\\\\"\\\\nI1003 13:31:48.538827 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 13:31:48.544908 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 13:31:48.544935 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 13:31:48.545220 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 13:31:48.545275 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 13:31:48.555911 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1003 13:31:48.555947 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 13:31:48.555953 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 13:31:48.555961 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 13:31:48.555964 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 13:31:48.555968 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 13:31:48.555971 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1003 13:31:48.556259 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1003 13:31:48.559989 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb1af1cb5b66706cd0a0da5a3f6b2c380a771100e61f84ca2c85c28f1878f7f6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bdf52752b6cec17ac3d33b9c94d08302ff9e7ff88ab9c23590134a36b1384af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bdf52752b6cec17ac3d33b9c94d08302ff9e7ff88ab9c23590134a36b1384af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:01Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:01 crc kubenswrapper[4861]: I1003 13:32:01.733688 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:01 crc kubenswrapper[4861]: I1003 13:32:01.733710 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:01 crc kubenswrapper[4861]: I1003 13:32:01.733718 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:01 crc kubenswrapper[4861]: I1003 13:32:01.733730 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 
03 13:32:01 crc kubenswrapper[4861]: I1003 13:32:01.733740 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:01Z","lastTransitionTime":"2025-10-03T13:32:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:01 crc kubenswrapper[4861]: I1003 13:32:01.744949 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6d0e9cdc-883e-4b67-afb2-2ef5f4b3246d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01eb05b088e421c220145fd833922351aeba4a520944c6b707039785e26ef303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb9bcc84067a58db80e3c7e1b23825baeaff91f97351e9ada3765b6589fda35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80131782150ecbeb45ec2f55e86909b3735ec4f0b09e27e31f6dfc24d6d4ccd7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kub
e-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a40838be9fe69f9bebecff82c9f10b4c00e167b7f927682e6b18ff490bd10ad4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:01Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:01 crc kubenswrapper[4861]: I1003 13:32:01.754605 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-n974h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"25670d98-45f4-4308-9576-f6f532c422ec\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d9b2d8fb10bb6dc17ca3b4826e1e4b7e8e562e8c8745605cd332268197166b04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2cs7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:56Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-n974h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:01Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:01 crc kubenswrapper[4861]: I1003 13:32:01.766007 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:01Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:01 crc kubenswrapper[4861]: I1003 13:32:01.778743 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ceeea9a8c61928b935a6c01f2dda3f9bf0036c2c2792c9338cc580a3296285b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:01Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:01 crc kubenswrapper[4861]: I1003 13:32:01.790125 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:01Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:01 crc kubenswrapper[4861]: I1003 13:32:01.802345 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://876e806fe7d7313a700bd557fe86fe469146eeb63ecd75684c558d04f6ef5862\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:01Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:01 crc kubenswrapper[4861]: I1003 13:32:01.822139 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66ed4999-426b-4615-bfb3-764a3ecc950f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a6dd78f8d0e99d19d90df2672bd0a66e48195ab147e3821b110c5b9b13fff935\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d7403a686a403bd13b8c7040a8d54e47ea882e532dbde51ff960cf2b4a7dc84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dadd381cc9fb8f216611723c7f3113272fdd37e424ab087ae2b516b1282c724\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c11d2168c2a8a146f93a9048c50a0a7da936f36039a924e2e0c946f571ac6d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://17d9e0e142062e93034c5f825e1229664112d38443d5843713cac6e077737c48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a49ed8048fd561e10cc87dfa9b39d3ff2123f2cc65f9b4402bba6bf01d161213\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89687e56af7bdba3d942add0c731be946a5aa1b1
9d9cd6db2fd80c30de87c15c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://89687e56af7bdba3d942add0c731be946a5aa1b19d9cd6db2fd80c30de87c15c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T13:32:00Z\\\",\\\"message\\\":\\\"pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI1003 13:32:00.271059 6031 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1003 13:32:00.271086 6031 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1003 13:32:00.271106 6031 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1003 13:32:00.271131 6031 factory.go:656] Stopping watch factory\\\\nI1003 13:32:00.271145 6031 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1003 13:32:00.271065 6031 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1003 13:32:00.271199 6031 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1003 13:32:00.271291 6031 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1003 13:32:00.271622 6031 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from 
github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2ee45e3f91ddde95e7bdf26aed6afb1d69eb3dbbdad136c66a51a2a3a325984\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85920cc6705b475966ceb17c3776353ec6cd31730aab339b4e5469034c4
9264e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://85920cc6705b475966ceb17c3776353ec6cd31730aab339b4e5469034c49264e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5twn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:01Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:01 crc kubenswrapper[4861]: I1003 13:32:01.829828 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/9671621a-5831-4fc4-8508-08b284d1cf88-env-overrides\") pod \"ovnkube-control-plane-749d76644c-hw4vl\" (UID: \"9671621a-5831-4fc4-8508-08b284d1cf88\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hw4vl" Oct 03 13:32:01 crc kubenswrapper[4861]: I1003 13:32:01.829864 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sv2d7\" (UniqueName: \"kubernetes.io/projected/9671621a-5831-4fc4-8508-08b284d1cf88-kube-api-access-sv2d7\") pod \"ovnkube-control-plane-749d76644c-hw4vl\" (UID: \"9671621a-5831-4fc4-8508-08b284d1cf88\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hw4vl" Oct 03 13:32:01 crc kubenswrapper[4861]: I1003 13:32:01.829911 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/9671621a-5831-4fc4-8508-08b284d1cf88-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-hw4vl\" (UID: \"9671621a-5831-4fc4-8508-08b284d1cf88\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hw4vl" Oct 03 13:32:01 crc kubenswrapper[4861]: I1003 13:32:01.830075 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/9671621a-5831-4fc4-8508-08b284d1cf88-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-hw4vl\" (UID: \"9671621a-5831-4fc4-8508-08b284d1cf88\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hw4vl" Oct 03 13:32:01 crc kubenswrapper[4861]: I1003 13:32:01.834374 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:01Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:01 crc kubenswrapper[4861]: I1003 13:32:01.835815 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:01 crc kubenswrapper[4861]: I1003 13:32:01.835847 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:01 crc kubenswrapper[4861]: I1003 13:32:01.835858 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:01 crc kubenswrapper[4861]: I1003 13:32:01.835895 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:01 crc kubenswrapper[4861]: I1003 13:32:01.835907 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:01Z","lastTransitionTime":"2025-10-03T13:32:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:01 crc kubenswrapper[4861]: I1003 13:32:01.851328 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f330284d8d5446236d2bf739c6df75969c865e304c5adab6b1ec40a92baf30d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63b55320840755d0a9c8296d19550ae1d7f5cb2f17d286dddc10a0202963bd98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:01Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:01 crc kubenswrapper[4861]: I1003 13:32:01.867188 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jwgvx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f714b7db-082f-4c2c-8239-ba5df6986c13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://163f5cadc8f9cf8082434639e5dd0dfae5cefc359dbf462b616e4dde476a309f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b76qk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jwgvx\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:01Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:01 crc kubenswrapper[4861]: I1003 13:32:01.879508 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d8335d3f-417e-4114-b306-a3d8f6c31348\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c79706d97ac0c9214aee8c49206bfb27e579a82781b63cf07bd7b9dc43077402\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7prvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://871a1c47b73846e3f28db33691e75b5ed73af7287e81dae4cf2134fd827614b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7prvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-t9slw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:01Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:01 crc kubenswrapper[4861]: I1003 13:32:01.892660 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hw4vl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9671621a-5831-4fc4-8508-08b284d1cf88\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:01Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:01Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sv2d7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sv2d7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:32:0
1Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hw4vl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:01Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:01 crc kubenswrapper[4861]: I1003 13:32:01.909284 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wm76s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3bf3157b-44d1-4bb3-b185-71523a80c054\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8fea8b7da57798521c13d0f35905e5311cd0d8016aa20c37cc0d73c8d6fbc1a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b41a8d9dd0f504f505f1d49f5b7fa39d025f0bc4c49b19ef61691a41bd162b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b41a8d9dd0f504f505f1d49f5b7fa39d025f0bc4c49b19ef61691a41bd162b0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\
\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad87b94fe313e12f4f857a69a0a01f6f34877ded4103f2de3015b588522d9904\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad87b94fe313e12f4f857a69a0a01f6f34877ded4103f2de3015b588522d9904\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4343077b8251a65be24e9269fa38bd48d628581ef65b43f7fa262286df7b677d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4343077b8251a65be24e9269fa38bd48d628581ef65b43f7fa262286df7b677d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d10915a62560e9a9e386d4029ccd1c6cd9c99209ab32653437c32a6e5cf7e98d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\
"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d10915a62560e9a9e386d4029ccd1c6cd9c99209ab32653437c32a6e5cf7e98d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cd99ac90e9cf617307233b5899d9da44b563bd5a0969e0a64c4073ee0122b63\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cd99ac90e9cf617307233b5899d9da44b563bd5a0969e0a64c4073ee0122b63\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b20088a595b5a59d1a0339827c7dd169c479a530aed875ac3eeb021b78269490\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b20088a595b5a59d1a0339827c7dd169c479a530aed875ac3eeb021b78269490\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wm76s\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:01Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:01 crc kubenswrapper[4861]: I1003 13:32:01.922343 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5twn4_66ed4999-426b-4615-bfb3-764a3ecc950f/ovnkube-controller/0.log" Oct 03 13:32:01 crc kubenswrapper[4861]: I1003 13:32:01.922662 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-c97s6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e1217d91-8c47-4353-b363-96c9de2cdb56\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70a4ac69623752a83655a58cf44ef00fbf88b0321bc83721fbbe16ea746699c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6zdw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-c97s6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:01Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:01 crc kubenswrapper[4861]: I1003 13:32:01.925050 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" event={"ID":"66ed4999-426b-4615-bfb3-764a3ecc950f","Type":"ContainerStarted","Data":"98cc8bd6dcdd5a486b40084c8a564a82868e13805b4c65ddaf39f5c3fe11266b"} Oct 03 13:32:01 crc kubenswrapper[4861]: I1003 13:32:01.925427 4861 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" Oct 03 13:32:01 crc kubenswrapper[4861]: I1003 13:32:01.930808 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/9671621a-5831-4fc4-8508-08b284d1cf88-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-hw4vl\" (UID: \"9671621a-5831-4fc4-8508-08b284d1cf88\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hw4vl" Oct 03 13:32:01 crc kubenswrapper[4861]: I1003 13:32:01.930842 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/9671621a-5831-4fc4-8508-08b284d1cf88-env-overrides\") pod \"ovnkube-control-plane-749d76644c-hw4vl\" (UID: \"9671621a-5831-4fc4-8508-08b284d1cf88\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hw4vl" Oct 03 13:32:01 crc kubenswrapper[4861]: I1003 13:32:01.930860 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sv2d7\" (UniqueName: \"kubernetes.io/projected/9671621a-5831-4fc4-8508-08b284d1cf88-kube-api-access-sv2d7\") pod \"ovnkube-control-plane-749d76644c-hw4vl\" (UID: \"9671621a-5831-4fc4-8508-08b284d1cf88\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hw4vl" Oct 03 13:32:01 crc kubenswrapper[4861]: I1003 13:32:01.930937 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/9671621a-5831-4fc4-8508-08b284d1cf88-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-hw4vl\" (UID: \"9671621a-5831-4fc4-8508-08b284d1cf88\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hw4vl" Oct 03 13:32:01 crc kubenswrapper[4861]: I1003 13:32:01.931490 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/9671621a-5831-4fc4-8508-08b284d1cf88-env-overrides\") pod \"ovnkube-control-plane-749d76644c-hw4vl\" (UID: \"9671621a-5831-4fc4-8508-08b284d1cf88\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hw4vl" Oct 03 13:32:01 crc kubenswrapper[4861]: I1003 13:32:01.932073 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/9671621a-5831-4fc4-8508-08b284d1cf88-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-hw4vl\" (UID: \"9671621a-5831-4fc4-8508-08b284d1cf88\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hw4vl" Oct 03 13:32:01 crc kubenswrapper[4861]: I1003 13:32:01.936460 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/9671621a-5831-4fc4-8508-08b284d1cf88-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-hw4vl\" (UID: \"9671621a-5831-4fc4-8508-08b284d1cf88\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hw4vl" Oct 03 13:32:01 crc kubenswrapper[4861]: I1003 13:32:01.937745 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:01 crc kubenswrapper[4861]: I1003 13:32:01.937778 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:01 crc kubenswrapper[4861]: I1003 13:32:01.937789 4861 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:01 crc kubenswrapper[4861]: I1003 13:32:01.937808 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:01 crc kubenswrapper[4861]: I1003 13:32:01.937820 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:01Z","lastTransitionTime":"2025-10-03T13:32:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:01 crc kubenswrapper[4861]: I1003 13:32:01.949490 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wm76s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3bf3157b-44d1-4bb3-b185-71523a80c054\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8fea8b7da57798521c13d0f35905e5311cd0d8016aa20c37cc0d73c8d6fbc1a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b41a8d9dd0f504f505f1d49f5b7fa39d025f0bc4c49b19ef61691a41bd162b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b41a8d9dd0f504f505f1d49f5b7fa39d025f0bc4c49b19ef61691a41bd162b0\\\",\\\"exitCode\\\":
0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad87b94fe313e12f4f857a69a0a01f6f34877ded4103f2de3015b588522d9904\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad87b94fe313e12f4f857a69a0a01f6f34877ded4103f2de3015b588522d9904\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4343077b8251a65be24e9269fa38bd48d628581ef65b43f7fa262286df7b677d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4343077b8251a65be24e9269fa38bd48d628581ef65b43f7fa262286df7b677d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d10915a62560e9a9e386d4029ccd1c6cd9c99209ab32653437c32a6e5cf7e98d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev
@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d10915a62560e9a9e386d4029ccd1c6cd9c99209ab32653437c32a6e5cf7e98d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cd99ac90e9cf617307233b5899d9da44b563bd5a0969e0a64c4073ee0122b63\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cd99ac90e9cf617307233b5899d9da44b563bd5a0969e0a64c4073ee0122b63\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b20088a595b5a59d1a0339827c7dd169c479a530aed875ac3eeb021b78269490\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b20088a595b5a59d1a0339827c7dd169c479a530aed875ac3eeb021b78269490\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wm76s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:01Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:01 crc kubenswrapper[4861]: I1003 13:32:01.949621 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sv2d7\" (UniqueName: \"kubernetes.io/projected/9671621a-5831-4fc4-8508-08b284d1cf88-kube-api-access-sv2d7\") pod \"ovnkube-control-plane-749d76644c-hw4vl\" (UID: \"9671621a-5831-4fc4-8508-08b284d1cf88\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hw4vl" Oct 03 13:32:01 crc kubenswrapper[4861]: I1003 13:32:01.958990 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-c97s6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e1217d91-8c47-4353-b363-96c9de2cdb56\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70a4ac69623752a83655a58cf44ef00fbf88b0321bc83721fbbe16ea746699c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6zdw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-c97s6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:01Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:01 crc kubenswrapper[4861]: I1003 13:32:01.971775 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"587ecce6-1ef4-4f74-a2ba-bd6e9fdb84dc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d77b51532e1ed4922634cbfc9360ac49276104c2c3ca115ea522ff423cd7bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://853fb69edcd3e4a27929ab2a6081c40f93553967619663805afb7b626f9c1e39\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75079f3e07d277ab11585e34fc72877ba93a8d0aeaa3f0c8bb214c7c14f9c1b1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"last
State\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd18d89b8b81faa6f8232c33ecaec19aafc3d9574a090cbef37bcf26f83a4bf9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cd18d89b8b81faa6f8232c33ecaec19aafc3d9574a090cbef37bcf26f83a4bf9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 13:31:47.746138 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 13:31:47.746280 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 13:31:47.747035 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2119377140/tls.crt::/tmp/serving-cert-2119377140/tls.key\\\\\\\"\\\\nI1003 13:31:48.538827 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 13:31:48.544908 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 13:31:48.544935 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 13:31:48.545220 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 13:31:48.545275 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 13:31:48.555911 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1003 13:31:48.555947 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 13:31:48.555953 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 13:31:48.555961 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 13:31:48.555964 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 13:31:48.555968 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 13:31:48.555971 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1003 13:31:48.556259 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1003 13:31:48.559989 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb1af1cb5b66706cd0a0da5a3f6b2c380a771100e61f84ca2c85c28f1878f7f6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bdf52752b6cec17ac3d33b9c94d08302ff9e7ff88ab9c23590134a36b1384af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bdf52752b6cec17ac3d33b9c94d08302ff9e7ff88ab9c23590134a36b1384af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:01Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:01 crc kubenswrapper[4861]: I1003 13:32:01.981808 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6d0e9cdc-883e-4b67-afb2-2ef5f4b3246d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01eb05b088e421c220145fd833922351aeba4a520944c6b707039785e26ef303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb9bcc84067a58db80e3c7e1b23825baeaff91f97351e9ada3765b6589fda35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80131782150ecbeb45ec2f55e86909b3735ec4f0b09e27e31f6dfc24d6d4ccd7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a40838be9fe69f9bebecff82c9f10b4c00e167b7f927682e6b18ff490bd10ad4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:01Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:01 crc kubenswrapper[4861]: I1003 13:32:01.990604 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-n974h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"25670d98-45f4-4308-9576-f6f532c422ec\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d9b2d8fb10bb6dc17ca3b4826e1e4b7e8e562e8c8745605cd332268197166b04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2cs7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase
\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:56Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-n974h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:01Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:02 crc kubenswrapper[4861]: I1003 13:32:02.008498 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66ed4999-426b-4615-bfb3-764a3ecc950f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a6dd78f8d0e99d19d90df2672bd0a66e48195ab147e3821b110c5b9b13fff935\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d7403a686a403bd13b8c7040a8d54e47ea882e532dbde51ff960cf2b4a7dc84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dadd381cc9fb8f216611723c7f3113272fdd37e424ab087ae2b516b1282c724\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c11d2168c2a8a146f93a9048c50a0a7da936f36039a924e2e0c946f571ac6d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://17d9e0e142062e93034c5f825e1229664112d38443d5843713cac6e077737c48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a49ed8048fd561e10cc87dfa9b39d3ff2123f2cc65f9b4402bba6bf01d161213\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://98cc8bd6dcdd5a486b40084c8a564a82868e1380
5b4c65ddaf39f5c3fe11266b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://89687e56af7bdba3d942add0c731be946a5aa1b19d9cd6db2fd80c30de87c15c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T13:32:00Z\\\",\\\"message\\\":\\\"pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI1003 13:32:00.271059 6031 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1003 13:32:00.271086 6031 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1003 13:32:00.271106 6031 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1003 13:32:00.271131 6031 factory.go:656] Stopping watch factory\\\\nI1003 13:32:00.271145 6031 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1003 13:32:00.271065 6031 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1003 13:32:00.271199 6031 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1003 13:32:00.271291 6031 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1003 13:32:00.271622 6031 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from 
github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:55Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:32:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2ee45e3f91ddde95e7bdf26aed6afb1d69eb3dbbdad136c66a51a2a3a325984\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126
.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85920cc6705b475966ceb17c3776353ec6cd31730aab339b4e5469034c49264e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://85920cc6705b475966ceb17c3776353ec6cd31730aab339b4e5469034c49264e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5twn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:02Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:02 crc kubenswrapper[4861]: I1003 13:32:02.019906 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:02Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:02 crc kubenswrapper[4861]: I1003 13:32:02.022331 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hw4vl" Oct 03 13:32:02 crc kubenswrapper[4861]: I1003 13:32:02.035978 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ceeea9a8c61928b935a6c01f2dda3f9bf0036c2c2792c9338cc580a3296285b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:02Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:02 crc kubenswrapper[4861]: I1003 13:32:02.039393 4861 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:02 crc kubenswrapper[4861]: I1003 13:32:02.039411 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:02 crc kubenswrapper[4861]: I1003 13:32:02.039418 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:02 crc kubenswrapper[4861]: I1003 13:32:02.039431 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:02 crc kubenswrapper[4861]: I1003 13:32:02.039439 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:02Z","lastTransitionTime":"2025-10-03T13:32:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:02 crc kubenswrapper[4861]: I1003 13:32:02.055562 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:02Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:02 crc kubenswrapper[4861]: I1003 13:32:02.068080 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://876e806fe7d7313a700bd557fe86fe469146eeb63ecd75684c558d04f6ef5862\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:02Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:02 crc kubenswrapper[4861]: I1003 13:32:02.080818 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:02Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:02 crc kubenswrapper[4861]: I1003 13:32:02.093556 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f330284d8d5446236d2bf739c6df75969c865e304c5adab6b1ec40a92baf30d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63b55320840755d0a9c8296d19550ae1d7f5cb2f17d286dddc10a0202963bd98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:02Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:02 crc kubenswrapper[4861]: I1003 13:32:02.108683 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jwgvx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f714b7db-082f-4c2c-8239-ba5df6986c13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://163f5cadc8f9cf8082434639e5dd0dfae5cefc359dbf462b616e4dde476a309f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b76qk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jwgvx\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:02Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:02 crc kubenswrapper[4861]: I1003 13:32:02.128584 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d8335d3f-417e-4114-b306-a3d8f6c31348\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c79706d97ac0c9214aee8c49206bfb27e579a82781b63cf07bd7b9dc43077402\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7prvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://871a1c47b73846e3f28db33691e75b5ed73af7287e81dae4cf2134fd827614b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7prvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-t9slw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:02Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:02 crc kubenswrapper[4861]: I1003 13:32:02.141625 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:02 crc kubenswrapper[4861]: I1003 13:32:02.141680 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:02 crc kubenswrapper[4861]: I1003 13:32:02.141692 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:02 crc kubenswrapper[4861]: I1003 13:32:02.141708 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:02 crc kubenswrapper[4861]: I1003 13:32:02.141719 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:02Z","lastTransitionTime":"2025-10-03T13:32:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:02 crc kubenswrapper[4861]: I1003 13:32:02.142584 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hw4vl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9671621a-5831-4fc4-8508-08b284d1cf88\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:01Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:01Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sv2d7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sv2d7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:32:01Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hw4vl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:02Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:02 crc kubenswrapper[4861]: I1003 13:32:02.244611 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:02 crc kubenswrapper[4861]: I1003 13:32:02.244634 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:02 crc kubenswrapper[4861]: I1003 13:32:02.244642 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:02 crc kubenswrapper[4861]: I1003 13:32:02.244654 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:02 crc kubenswrapper[4861]: I1003 13:32:02.244662 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:02Z","lastTransitionTime":"2025-10-03T13:32:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Oct 03 13:32:02 crc kubenswrapper[4861]: I1003 13:32:02.346816 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:02 crc kubenswrapper[4861]: I1003 13:32:02.346846 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:02 crc kubenswrapper[4861]: I1003 13:32:02.346855 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:02 crc kubenswrapper[4861]: I1003 13:32:02.346869 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:02 crc kubenswrapper[4861]: I1003 13:32:02.346878 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:02Z","lastTransitionTime":"2025-10-03T13:32:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:02 crc kubenswrapper[4861]: I1003 13:32:02.448782 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:02 crc kubenswrapper[4861]: I1003 13:32:02.448829 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:02 crc kubenswrapper[4861]: I1003 13:32:02.448838 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:02 crc kubenswrapper[4861]: I1003 13:32:02.448854 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:02 crc kubenswrapper[4861]: I1003 13:32:02.448867 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:02Z","lastTransitionTime":"2025-10-03T13:32:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:02 crc kubenswrapper[4861]: I1003 13:32:02.551286 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:02 crc kubenswrapper[4861]: I1003 13:32:02.551628 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:02 crc kubenswrapper[4861]: I1003 13:32:02.551707 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:02 crc kubenswrapper[4861]: I1003 13:32:02.551782 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:02 crc kubenswrapper[4861]: I1003 13:32:02.551886 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:02Z","lastTransitionTime":"2025-10-03T13:32:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:02 crc kubenswrapper[4861]: I1003 13:32:02.654632 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:02 crc kubenswrapper[4861]: I1003 13:32:02.654925 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:02 crc kubenswrapper[4861]: I1003 13:32:02.655032 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:02 crc kubenswrapper[4861]: I1003 13:32:02.655116 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:02 crc kubenswrapper[4861]: I1003 13:32:02.655174 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:02Z","lastTransitionTime":"2025-10-03T13:32:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:02 crc kubenswrapper[4861]: I1003 13:32:02.683030 4861 scope.go:117] "RemoveContainer" containerID="cd18d89b8b81faa6f8232c33ecaec19aafc3d9574a090cbef37bcf26f83a4bf9"
Oct 03 13:32:02 crc kubenswrapper[4861]: I1003 13:32:02.757433 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:02 crc kubenswrapper[4861]: I1003 13:32:02.757464 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:02 crc kubenswrapper[4861]: I1003 13:32:02.757472 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:02 crc kubenswrapper[4861]: I1003 13:32:02.757485 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:02 crc kubenswrapper[4861]: I1003 13:32:02.757493 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:02Z","lastTransitionTime":"2025-10-03T13:32:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:02 crc kubenswrapper[4861]: I1003 13:32:02.816763 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-cft42"]
Oct 03 13:32:02 crc kubenswrapper[4861]: I1003 13:32:02.817169 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cft42"
Oct 03 13:32:02 crc kubenswrapper[4861]: E1003 13:32:02.817266 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
pod="openshift-multus/network-metrics-daemon-cft42" podUID="267128bb-f8b8-4d69-99a3-ba3af795218c" Oct 03 13:32:02 crc kubenswrapper[4861]: I1003 13:32:02.831886 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ceeea9a8c61928b935a6c01f2dda3f9bf0036c2c2792c9338cc580a3296285b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:02Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:02 crc kubenswrapper[4861]: I1003 13:32:02.841911 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-shz7z\" (UniqueName: \"kubernetes.io/projected/267128bb-f8b8-4d69-99a3-ba3af795218c-kube-api-access-shz7z\") pod \"network-metrics-daemon-cft42\" (UID: \"267128bb-f8b8-4d69-99a3-ba3af795218c\") " pod="openshift-multus/network-metrics-daemon-cft42" Oct 03 13:32:02 crc kubenswrapper[4861]: I1003 13:32:02.842119 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/267128bb-f8b8-4d69-99a3-ba3af795218c-metrics-certs\") pod \"network-metrics-daemon-cft42\" (UID: \"267128bb-f8b8-4d69-99a3-ba3af795218c\") " pod="openshift-multus/network-metrics-daemon-cft42" Oct 03 13:32:02 crc kubenswrapper[4861]: I1003 13:32:02.844088 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:02Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:02 crc kubenswrapper[4861]: I1003 13:32:02.857889 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://876e806fe7d7313a700bd557fe86fe469146eeb63ecd75684c558d04f6ef5862\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:02Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:02 crc kubenswrapper[4861]: I1003 13:32:02.859401 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:02 crc kubenswrapper[4861]: I1003 13:32:02.859446 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:02 crc kubenswrapper[4861]: I1003 13:32:02.859457 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:02 crc kubenswrapper[4861]: I1003 13:32:02.859473 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:02 crc kubenswrapper[4861]: I1003 13:32:02.859484 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:02Z","lastTransitionTime":"2025-10-03T13:32:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:02 crc kubenswrapper[4861]: I1003 13:32:02.879408 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66ed4999-426b-4615-bfb3-764a3ecc950f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a6dd78f8d0e99d19d90df2672bd0a66e48195ab147e3821b110c5b9b13fff935\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d7403a686a403bd13b8c7040a8d54e47ea882e532dbde51ff960cf2b4a7dc84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://3dadd381cc9fb8f216611723c7f3113272fdd37e424ab087ae2b516b1282c724\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c11d2168c2a8a146f93a9048c50a0a7da936f36039a924e2e0c946f571ac6d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://17d9e0e142062e93034c5f825e1229664112d38443d5843713cac6e077737c48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a49ed8048fd561e10cc87dfa9b39d3ff2123f2cc65f9b4402bba6bf01d161213\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://98cc8bd6dcdd5a486b40084c8a564a82868e13805b4c65ddaf39f5c3fe11266b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://89687e56af7bdba3d942add0c731be946a5aa1b19d9cd6db2fd80c30de87c15c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T13:32:00Z\\\",\\\"message\\\":\\\"pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI1003 13:32:00.271059 6031 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1003 13:32:00.271086 6031 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1003 13:32:00.271106 6031 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1003 13:32:00.271131 6031 factory.go:656] Stopping watch factory\\\\nI1003 13:32:00.271145 6031 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1003 13:32:00.271065 6031 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1003 13:32:00.271199 6031 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1003 13:32:00.271291 6031 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1003 13:32:00.271622 6031 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from 
github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:55Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:32:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2ee45e3f91ddde95e7bdf26aed6afb1d69eb3dbbdad136c66a51a2a3a325984\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126
.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85920cc6705b475966ceb17c3776353ec6cd31730aab339b4e5469034c49264e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://85920cc6705b475966ceb17c3776353ec6cd31730aab339b4e5469034c49264e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5twn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:02Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:02 crc kubenswrapper[4861]: I1003 13:32:02.891136 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:02Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:02 crc kubenswrapper[4861]: I1003 13:32:02.900008 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d8335d3f-417e-4114-b306-a3d8f6c31348\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c79706d97ac0c9214aee8c49206bfb27e579a82781b63cf07bd7b9dc43077402\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7prvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://871a1c47b73846e3f28db33691e75b5ed73af7287e81dae4cf2134fd827614b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\
\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7prvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-t9slw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:02Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:02 crc kubenswrapper[4861]: I1003 13:32:02.910082 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hw4vl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9671621a-5831-4fc4-8508-08b284d1cf88\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:01Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:01Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sv2d7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sv2d7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:32:01Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hw4vl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:02Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:02 crc kubenswrapper[4861]: I1003 13:32:02.922173 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:02Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:02 crc kubenswrapper[4861]: I1003 13:32:02.930004 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5twn4_66ed4999-426b-4615-bfb3-764a3ecc950f/ovnkube-controller/1.log" Oct 03 13:32:02 crc kubenswrapper[4861]: I1003 13:32:02.931029 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5twn4_66ed4999-426b-4615-bfb3-764a3ecc950f/ovnkube-controller/0.log" Oct 03 13:32:02 crc kubenswrapper[4861]: I1003 13:32:02.932857 4861 generic.go:334] "Generic (PLEG): container finished" podID="66ed4999-426b-4615-bfb3-764a3ecc950f" containerID="98cc8bd6dcdd5a486b40084c8a564a82868e13805b4c65ddaf39f5c3fe11266b" exitCode=1 Oct 03 13:32:02 crc kubenswrapper[4861]: I1003 13:32:02.932900 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" event={"ID":"66ed4999-426b-4615-bfb3-764a3ecc950f","Type":"ContainerDied","Data":"98cc8bd6dcdd5a486b40084c8a564a82868e13805b4c65ddaf39f5c3fe11266b"} Oct 03 13:32:02 crc kubenswrapper[4861]: I1003 13:32:02.932931 4861 scope.go:117] "RemoveContainer" containerID="89687e56af7bdba3d942add0c731be946a5aa1b19d9cd6db2fd80c30de87c15c" Oct 03 13:32:02 crc kubenswrapper[4861]: I1003 13:32:02.934301 4861 scope.go:117] "RemoveContainer" containerID="98cc8bd6dcdd5a486b40084c8a564a82868e13805b4c65ddaf39f5c3fe11266b" Oct 03 13:32:02 crc kubenswrapper[4861]: I1003 13:32:02.934514 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f330284d8d5446236d2bf739c6df75969c865e304c5adab6b1ec40a92baf30d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63b55320840755d0a9c8296d19550ae1d7f5cb2f17d286dddc10a0202963bd98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:02Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:02 crc kubenswrapper[4861]: E1003 13:32:02.934676 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-5twn4_openshift-ovn-kubernetes(66ed4999-426b-4615-bfb3-764a3ecc950f)\"" pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" podUID="66ed4999-426b-4615-bfb3-764a3ecc950f" Oct 03 13:32:02 crc 
kubenswrapper[4861]: I1003 13:32:02.935197 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hw4vl" event={"ID":"9671621a-5831-4fc4-8508-08b284d1cf88","Type":"ContainerStarted","Data":"5cb2b6048f454a138caea8eca4a26c6e5cd219c9d124f46cfa69c168150b6ee4"} Oct 03 13:32:02 crc kubenswrapper[4861]: I1003 13:32:02.935249 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hw4vl" event={"ID":"9671621a-5831-4fc4-8508-08b284d1cf88","Type":"ContainerStarted","Data":"61cc448a355144e0f1eb395e56a5c87df47fff6aac9fe9a3d0b597ab5a001ae2"} Oct 03 13:32:02 crc kubenswrapper[4861]: I1003 13:32:02.942880 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-shz7z\" (UniqueName: \"kubernetes.io/projected/267128bb-f8b8-4d69-99a3-ba3af795218c-kube-api-access-shz7z\") pod \"network-metrics-daemon-cft42\" (UID: \"267128bb-f8b8-4d69-99a3-ba3af795218c\") " pod="openshift-multus/network-metrics-daemon-cft42" Oct 03 13:32:02 crc kubenswrapper[4861]: I1003 13:32:02.942922 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/267128bb-f8b8-4d69-99a3-ba3af795218c-metrics-certs\") pod \"network-metrics-daemon-cft42\" (UID: \"267128bb-f8b8-4d69-99a3-ba3af795218c\") " pod="openshift-multus/network-metrics-daemon-cft42" Oct 03 13:32:02 crc kubenswrapper[4861]: E1003 13:32:02.944585 4861 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 03 13:32:02 crc kubenswrapper[4861]: E1003 13:32:02.944638 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/267128bb-f8b8-4d69-99a3-ba3af795218c-metrics-certs podName:267128bb-f8b8-4d69-99a3-ba3af795218c nodeName:}" failed. No retries permitted until 2025-10-03 13:32:03.444620418 +0000 UTC m=+37.442605465 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/267128bb-f8b8-4d69-99a3-ba3af795218c-metrics-certs") pod "network-metrics-daemon-cft42" (UID: "267128bb-f8b8-4d69-99a3-ba3af795218c") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 03 13:32:02 crc kubenswrapper[4861]: I1003 13:32:02.953678 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jwgvx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f714b7db-082f-4c2c-8239-ba5df6986c13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://163f5cadc8f9cf8082434639e5dd0dfae5cefc359dbf462b616e4dde476a309f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"n
ame\\\":\\\"kube-api-access-b76qk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jwgvx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:02Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:02 crc kubenswrapper[4861]: I1003 13:32:02.964170 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-shz7z\" (UniqueName: \"kubernetes.io/projected/267128bb-f8b8-4d69-99a3-ba3af795218c-kube-api-access-shz7z\") pod \"network-metrics-daemon-cft42\" (UID: \"267128bb-f8b8-4d69-99a3-ba3af795218c\") " pod="openshift-multus/network-metrics-daemon-cft42" Oct 03 13:32:02 crc kubenswrapper[4861]: I1003 13:32:02.964944 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:02 crc kubenswrapper[4861]: I1003 13:32:02.965078 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:02 crc kubenswrapper[4861]: I1003 13:32:02.965158 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:02 crc kubenswrapper[4861]: I1003 13:32:02.965283 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:02 crc kubenswrapper[4861]: I1003 13:32:02.965370 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:02Z","lastTransitionTime":"2025-10-03T13:32:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:02 crc kubenswrapper[4861]: I1003 13:32:02.968472 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wm76s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3bf3157b-44d1-4bb3-b185-71523a80c054\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8fea8b7da57798521c13d0f35905e5311cd0d8016aa20c37cc0d73c8d6fbc1a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b41a8d9dd0f504f505f1d49f5b7fa39d025f0bc4c49b19ef61691a41bd162b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b41a8d9dd0f504f505f1d49f5b7fa39d025f0bc4c49b19ef61691a41bd162b0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad87b94fe313e12f4f857a69a0a01f6f34877ded4103f2de3015b588522d9904\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad87b94fe313e12f4f857a69a0a01f6f34877ded4103f2de3015b588522d9904\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4343077b8251a65be24e9269fa38bd48d628581ef65b43f7fa262286df7b677d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4343077b8251a65be24e9269fa38bd48d628581ef65b43f7fa262286df7b677d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d10915a62560e9a9e386d4029ccd1c6cd9c99209ab32653437c32a6e5cf7e98d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d10915a62560e9a9e386d4029ccd1c6cd9c99209ab32653437c32a6e5cf7e98d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cd99ac90e9cf617307233b5899d9da44b563bd5a0969e0a64c4073ee0122b63\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cd99ac90e9cf617307233b5899d9da44b563bd5a0969e0a64c4073ee0122b63\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b20088a595b5a59d1a0339827c7dd169c479a530aed875ac3eeb021b78269490\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b20088a595b5a59d1a0339827c7dd169c479a530aed875ac3eeb021b78269490\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wm76s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:02Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:02 crc kubenswrapper[4861]: I1003 13:32:02.978521 4861 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-dns/node-resolver-c97s6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e1217d91-8c47-4353-b363-96c9de2cdb56\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70a4ac69623752a83655a58cf44ef00fbf88b0321bc83721fbbe16ea746699c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6zdw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-c97s6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:02Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:02 crc kubenswrapper[4861]: I1003 13:32:02.987704 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-cft42" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"267128bb-f8b8-4d69-99a3-ba3af795218c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shz7z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shz7z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:32:02Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-cft42\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:02Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:03 crc kubenswrapper[4861]: I1003 13:32:03.000947 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"587ecce6-1ef4-4f74-a2ba-bd6e9fdb84dc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d77b51532e1ed4922634cbfc9360ac49276104c2c3ca115ea522ff423cd7bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://853fb69edcd3e4a27929ab2a6081c40f93553967619663805afb7b626f9c1e39\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75079f3e07d277ab11585e34fc72877ba93a8d0aeaa3f0c8bb214c7c14f9c1b1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd18d89b8b81faa6f8232c33ecaec19aafc3d9574a090cbef37bcf26f83a4bf9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cd18d89b8b81faa6f8232c33ecaec19aafc3d9574a090cbef37bcf26f83a4bf9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 13:31:47.746138 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 13:31:47.746280 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 13:31:47.747035 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2119377140/tls.crt::/tmp/serving-cert-2119377140/tls.key\\\\\\\"\\\\nI1003 13:31:48.538827 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 13:31:48.544908 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 13:31:48.544935 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 13:31:48.545220 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 13:31:48.545275 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 13:31:48.555911 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1003 13:31:48.555947 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 13:31:48.555953 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 13:31:48.555961 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 13:31:48.555964 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 13:31:48.555968 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 13:31:48.555971 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1003 13:31:48.556259 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1003 13:31:48.559989 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb1af1cb5b66706cd0a0da5a3f6b2c380a771100e61f84ca2c85c28f1878f7f6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bdf52752b6cec17ac3d33b9c94d08302ff9e7ff88ab9c23590134a36b1384af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bdf52752b6cec17ac3d33b9c94d08302ff9e7ff88ab9c23590134a36b1384af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:02Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:03 crc kubenswrapper[4861]: I1003 13:32:03.013386 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6d0e9cdc-883e-4b67-afb2-2ef5f4b3246d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01eb05b088e421c220145fd833922351aeba4a520944c6b707039785e26ef303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb9bcc84067a58db80e3c7e1b23825baeaff91f97351e9ada3765b6589fda35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80131782150ecbeb45ec2f55e86909b3735ec4f0b09e27e31f6dfc24d6d4ccd7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a40838be9fe69f9bebecff82c9f10b4c00e167b7f927682e6b18ff490bd10ad4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:03Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:03 crc kubenswrapper[4861]: I1003 13:32:03.023151 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-n974h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"25670d98-45f4-4308-9576-f6f532c422ec\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d9b2d8fb10bb6dc17ca3b4826e1e4b7e8e562e8c8745605cd332268197166b04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2cs7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase
\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:56Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-n974h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:03Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:03 crc kubenswrapper[4861]: I1003 13:32:03.038054 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wm76s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3bf3157b-44d1-4bb3-b185-71523a80c054\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8fea8b7da57798521c13d0f35905e5311cd0d8016aa20c37cc0d73c8d6fbc1a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b41a8d9dd0f504f505f1d49f5b7fa39d025f0bc4c49b19ef61691a41bd162b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b41a8d9dd0f504f505f1d49f5b7fa39d025f0bc4c49b19ef61691a41bd162b0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad87b94fe313e12f4f857a69a0a01f6f34877ded4103f2de3015b588522d9904\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad87b94fe313e12f4f857a69a0a01f6f34877ded4103f2de3015b588522d9904\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4343077b8251a65be24e9269fa38bd48d628581ef65b43f7fa262286df7b677d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4343077b8251a65be24e9269fa38bd48d628581ef65b43f7fa262286df7b677d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d10915a62560e9a9e386d4029ccd1c6cd9c99209ab32653437c32a6e5cf7e98d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\
":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d10915a62560e9a9e386d4029ccd1c6cd9c99209ab32653437c32a6e5cf7e98d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cd99ac90e9cf617307233b5899d9da44b563bd5a0969e0a64c4073ee0122b63\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cd99ac90e9cf617307233b5899d9da44b563bd5a0969e0a64c4073ee0122b63\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b20088a595b5a59d1a0339827c7dd169c479a530aed875ac3eeb021b78269490\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b20088a595b5a59d1a0339827c7dd169c479a530aed875ac3eeb021b78269490\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-additional-cni-plugins-wm76s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:03Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:03 crc kubenswrapper[4861]: I1003 13:32:03.047367 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-c97s6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e1217d91-8c47-4353-b363-96c9de2cdb56\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70a4ac69623752a83655a58cf44ef00fbf88b0321bc83721fbbe16ea746699c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6zdw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-c97s6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:03Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:03 crc kubenswrapper[4861]: I1003 13:32:03.058687 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"587ecce6-1ef4-4f74-a2ba-bd6e9fdb84dc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d77b51532e1ed4922634cbfc9360ac49276104c2c3ca115ea522ff423cd7bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://853fb69edcd3e4a27929ab2a6081c40f93553967619663805afb7b626f9c1e39\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75079f3e07d277ab11585e34fc72877ba93a8d0aeaa3f0c8bb214c7c14f9c1b1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd18d89b8b81faa6f8232c33ecaec19aafc3d9574a090cbef37bcf26f83a4bf9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cd18d89b8b81faa6f8232c33ecaec19aafc3d9574a090cbef37bcf26f83a4bf9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 13:31:47.746138 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 13:31:47.746280 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 13:31:47.747035 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2119377140/tls.crt::/tmp/serving-cert-2119377140/tls.key\\\\\\\"\\\\nI1003 13:31:48.538827 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 13:31:48.544908 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 13:31:48.544935 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 13:31:48.545220 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 13:31:48.545275 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 13:31:48.555911 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1003 13:31:48.555947 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 13:31:48.555953 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 13:31:48.555961 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 13:31:48.555964 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 13:31:48.555968 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 13:31:48.555971 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1003 13:31:48.556259 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1003 13:31:48.559989 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb1af1cb5b66706cd0a0da5a3f6b2c380a771100e61f84ca2c85c28f1878f7f6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bdf52752b6cec17ac3d33b9c94d08302ff9e7ff88ab9c23590134a36b1384af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bdf52752b6cec17ac3d33b9c94d08302ff9e7ff88ab9c23590134a36b1384af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:03Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:03 crc kubenswrapper[4861]: I1003 13:32:03.068049 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:03 crc kubenswrapper[4861]: I1003 13:32:03.068073 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:03 crc kubenswrapper[4861]: I1003 13:32:03.068080 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:03 crc kubenswrapper[4861]: I1003 13:32:03.068093 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:03 crc kubenswrapper[4861]: I1003 13:32:03.068101 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:03Z","lastTransitionTime":"2025-10-03T13:32:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:03 crc kubenswrapper[4861]: I1003 13:32:03.070447 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6d0e9cdc-883e-4b67-afb2-2ef5f4b3246d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01eb05b088e421c220145fd833922351aeba4a520944c6b707039785e26ef303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb9bcc84067a58db80e3c7e1b23825baeaff91f97351e9ada3765b6589fda35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80131782150ecbeb45ec2f55e86909b3735ec4f0b09e27e31f6dfc24d6d4ccd7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28
Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a40838be9fe69f9bebecff82c9f10b4c00e167b7f927682e6b18ff490bd10ad4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:03Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:03 crc kubenswrapper[4861]: I1003 13:32:03.079568 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-n974h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"25670d98-45f4-4308-9576-f6f532c422ec\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d9b2d8fb10bb6dc17ca3b4826e1e4b7e8e562e8c8745605cd332268197166b04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2cs7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:56Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-n974h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:03Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:03 crc kubenswrapper[4861]: I1003 13:32:03.089299 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-cft42" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"267128bb-f8b8-4d69-99a3-ba3af795218c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shz7z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shz7z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:32:02Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-cft42\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:03Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:03 crc kubenswrapper[4861]: I1003 13:32:03.105056 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66ed4999-426b-4615-bfb3-764a3ecc950f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a6dd78f8d0e99d19d90df2672bd0a66e48195ab147e3821b110c5b9b13fff935\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d7403a686a403bd13b8c7040a8d54e47ea882e532dbde51ff960cf2b4a7dc84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dadd381cc9fb8f216611723c7f3113272fdd37e424ab087ae2b516b1282c724\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c11d2168c2a8a146f93a9048c50a0a7da936f36039a924e2e0c946f571ac6d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://17d9e0e142062e93034c5f825e1229664112d38443d5843713cac6e077737c48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a49ed8048fd561e10cc87dfa9b39d3ff2123f2cc65f9b4402bba6bf01d161213\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://98cc8bd6dcdd5a486b40084c8a564a82868e1380
5b4c65ddaf39f5c3fe11266b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://89687e56af7bdba3d942add0c731be946a5aa1b19d9cd6db2fd80c30de87c15c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T13:32:00Z\\\",\\\"message\\\":\\\"pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI1003 13:32:00.271059 6031 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1003 13:32:00.271086 6031 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1003 13:32:00.271106 6031 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1003 13:32:00.271131 6031 factory.go:656] Stopping watch factory\\\\nI1003 13:32:00.271145 6031 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1003 13:32:00.271065 6031 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1003 13:32:00.271199 6031 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1003 13:32:00.271291 6031 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1003 13:32:00.271622 6031 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:55Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://98cc8bd6dcdd5a486b40084c8a564a82868e13805b4c65ddaf39f5c3fe11266b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T13:32:02Z\\\",\\\"message\\\":\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-console/downloads\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.213\\\\\\\", Port:80, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nF1003 13:32:01.716595 6227 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook 
\\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:01Z is after 2025-08-24T17:21:41Z]\\\\nI100\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T13:32:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2ee45e3f91ddde95e7bdf26aed6afb1d69eb3dbbdad136c66a51a2a3a325984\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\
\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85920cc6705b475966ceb17c3776353ec6cd31730aab339b4e5469034c49264e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://85920cc6705b475966ceb17c3776353ec6cd31730aab339b4e5469034c49264e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5twn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:03Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:03 crc kubenswrapper[4861]: I1003 13:32:03.115354 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:03Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:03 crc kubenswrapper[4861]: I1003 13:32:03.125821 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ceeea9a8c61928b935a6c01f2dda3f9bf0036c2c2792c9338cc580a3296285b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:03Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:03 crc kubenswrapper[4861]: I1003 13:32:03.135337 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:03Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:03 crc kubenswrapper[4861]: I1003 13:32:03.145065 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://876e806fe7d7313a700bd557fe86fe469146eeb63ecd75684c558d04f6ef5862\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:03Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:03 crc kubenswrapper[4861]: I1003 13:32:03.156554 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:03Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:03 crc kubenswrapper[4861]: I1003 13:32:03.168390 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f330284d8d5446236d2bf739c6df75969c865e304c5adab6b1ec40a92baf30d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63b55320840755d0a9c8296d19550ae1d7f5cb2f17d286dddc10a0202963bd98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:03Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:03 crc kubenswrapper[4861]: I1003 13:32:03.169831 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:03 crc kubenswrapper[4861]: I1003 13:32:03.169861 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:03 crc kubenswrapper[4861]: I1003 13:32:03.169954 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:03 crc kubenswrapper[4861]: I1003 13:32:03.169984 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:03 crc kubenswrapper[4861]: I1003 13:32:03.170010 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:03Z","lastTransitionTime":"2025-10-03T13:32:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:03 crc kubenswrapper[4861]: I1003 13:32:03.181551 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jwgvx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f714b7db-082f-4c2c-8239-ba5df6986c13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://163f5cadc8f9cf8082434639e5dd0dfae5cefc359dbf462b616e4dde476a309f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b76qk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jwgvx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:03Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:03 crc kubenswrapper[4861]: I1003 13:32:03.192284 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d8335d3f-417e-4114-b306-a3d8f6c31348\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c79706d97ac0c9214aee8c49206bfb27e579a82781b63cf07bd7b9dc43077402\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7prvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://871a1c47b73846e3f28db33691e75b5ed73af7287e81dae4cf2134fd827614b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7prvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":
\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-t9slw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:03Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:03 crc kubenswrapper[4861]: I1003 13:32:03.202037 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hw4vl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9671621a-5831-4fc4-8508-08b284d1cf88\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:01Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:01Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sv2d7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sv2d7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:32:01Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hw4vl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:03Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:03 crc kubenswrapper[4861]: I1003 13:32:03.272735 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:03 crc kubenswrapper[4861]: I1003 13:32:03.272804 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:03 crc kubenswrapper[4861]: I1003 13:32:03.272827 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:03 crc kubenswrapper[4861]: I1003 13:32:03.272856 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:03 crc kubenswrapper[4861]: I1003 13:32:03.272878 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:03Z","lastTransitionTime":"2025-10-03T13:32:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:03 crc kubenswrapper[4861]: I1003 13:32:03.374699 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:03 crc kubenswrapper[4861]: I1003 13:32:03.374738 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:03 crc kubenswrapper[4861]: I1003 13:32:03.374747 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:03 crc kubenswrapper[4861]: I1003 13:32:03.374761 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:03 crc kubenswrapper[4861]: I1003 13:32:03.374770 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:03Z","lastTransitionTime":"2025-10-03T13:32:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:03 crc kubenswrapper[4861]: I1003 13:32:03.447454 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 13:32:03 crc kubenswrapper[4861]: I1003 13:32:03.447570 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 13:32:03 crc kubenswrapper[4861]: I1003 13:32:03.447616 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 13:32:03 crc kubenswrapper[4861]: E1003 13:32:03.447644 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 13:32:19.44762007 +0000 UTC m=+53.445605117 (durationBeforeRetry 16s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:32:03 crc kubenswrapper[4861]: I1003 13:32:03.447689 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 13:32:03 crc kubenswrapper[4861]: I1003 13:32:03.447746 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 13:32:03 crc kubenswrapper[4861]: E1003 13:32:03.447759 4861 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 03 13:32:03 crc kubenswrapper[4861]: I1003 13:32:03.447780 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/267128bb-f8b8-4d69-99a3-ba3af795218c-metrics-certs\") pod \"network-metrics-daemon-cft42\" (UID: \"267128bb-f8b8-4d69-99a3-ba3af795218c\") " pod="openshift-multus/network-metrics-daemon-cft42" Oct 03 13:32:03 crc kubenswrapper[4861]: E1003 13:32:03.447815 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-03 13:32:19.447797994 +0000 UTC m=+53.445783091 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 03 13:32:03 crc kubenswrapper[4861]: E1003 13:32:03.447885 4861 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 03 13:32:03 crc kubenswrapper[4861]: E1003 13:32:03.447921 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/267128bb-f8b8-4d69-99a3-ba3af795218c-metrics-certs podName:267128bb-f8b8-4d69-99a3-ba3af795218c nodeName:}" failed. No retries permitted until 2025-10-03 13:32:04.447914237 +0000 UTC m=+38.445899284 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/267128bb-f8b8-4d69-99a3-ba3af795218c-metrics-certs") pod "network-metrics-daemon-cft42" (UID: "267128bb-f8b8-4d69-99a3-ba3af795218c") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 03 13:32:03 crc kubenswrapper[4861]: E1003 13:32:03.447978 4861 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 03 13:32:03 crc kubenswrapper[4861]: E1003 13:32:03.447989 4861 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 03 13:32:03 crc kubenswrapper[4861]: E1003 13:32:03.448000 4861 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 13:32:03 crc kubenswrapper[4861]: E1003 13:32:03.448022 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-03 13:32:19.44801587 +0000 UTC m=+53.446000917 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 13:32:03 crc kubenswrapper[4861]: E1003 13:32:03.448067 4861 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 03 13:32:03 crc kubenswrapper[4861]: E1003 13:32:03.448092 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-03 13:32:19.448084401 +0000 UTC m=+53.446069548 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 03 13:32:03 crc kubenswrapper[4861]: E1003 13:32:03.448141 4861 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 03 13:32:03 crc kubenswrapper[4861]: E1003 13:32:03.448151 4861 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 03 13:32:03 crc kubenswrapper[4861]: E1003 13:32:03.448158 4861 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 13:32:03 crc kubenswrapper[4861]: E1003 13:32:03.448178 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-03 13:32:19.448173044 +0000 UTC m=+53.446158091 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 13:32:03 crc kubenswrapper[4861]: I1003 13:32:03.477453 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:03 crc kubenswrapper[4861]: I1003 13:32:03.477504 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:03 crc kubenswrapper[4861]: I1003 13:32:03.477516 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:03 crc kubenswrapper[4861]: I1003 13:32:03.477804 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:03 crc kubenswrapper[4861]: I1003 13:32:03.477823 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:03Z","lastTransitionTime":"2025-10-03T13:32:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Oct 03 13:32:03 crc kubenswrapper[4861]: I1003 13:32:03.580460 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:03 crc kubenswrapper[4861]: I1003 13:32:03.580724 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:03 crc kubenswrapper[4861]: I1003 13:32:03.580841 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:03 crc kubenswrapper[4861]: I1003 13:32:03.580951 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:03 crc kubenswrapper[4861]: I1003 13:32:03.581076 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:03Z","lastTransitionTime":"2025-10-03T13:32:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:03 crc kubenswrapper[4861]: I1003 13:32:03.680554 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 03 13:32:03 crc kubenswrapper[4861]: E1003 13:32:03.680677 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 03 13:32:03 crc kubenswrapper[4861]: I1003 13:32:03.680568 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 03 13:32:03 crc kubenswrapper[4861]: E1003 13:32:03.680743 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 03 13:32:03 crc kubenswrapper[4861]: I1003 13:32:03.680554 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 03 13:32:03 crc kubenswrapper[4861]: E1003 13:32:03.680800 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 03 13:32:03 crc kubenswrapper[4861]: I1003 13:32:03.683008 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:03 crc kubenswrapper[4861]: I1003 13:32:03.683043 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:03 crc kubenswrapper[4861]: I1003 13:32:03.683052 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:03 crc kubenswrapper[4861]: I1003 13:32:03.683064 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:03 crc kubenswrapper[4861]: I1003 13:32:03.683074 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:03Z","lastTransitionTime":"2025-10-03T13:32:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:03 crc kubenswrapper[4861]: I1003 13:32:03.784813 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:03 crc kubenswrapper[4861]: I1003 13:32:03.784851 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:03 crc kubenswrapper[4861]: I1003 13:32:03.784862 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:03 crc kubenswrapper[4861]: I1003 13:32:03.784878 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:03 crc kubenswrapper[4861]: I1003 13:32:03.784888 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:03Z","lastTransitionTime":"2025-10-03T13:32:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:03 crc kubenswrapper[4861]: I1003 13:32:03.886512 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:03 crc kubenswrapper[4861]: I1003 13:32:03.886542 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:03 crc kubenswrapper[4861]: I1003 13:32:03.886554 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:03 crc kubenswrapper[4861]: I1003 13:32:03.886568 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:03 crc kubenswrapper[4861]: I1003 13:32:03.886577 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:03Z","lastTransitionTime":"2025-10-03T13:32:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:03 crc kubenswrapper[4861]: I1003 13:32:03.939798 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5twn4_66ed4999-426b-4615-bfb3-764a3ecc950f/ovnkube-controller/1.log"
Oct 03 13:32:03 crc kubenswrapper[4861]: I1003 13:32:03.943073 4861 scope.go:117] "RemoveContainer" containerID="98cc8bd6dcdd5a486b40084c8a564a82868e13805b4c65ddaf39f5c3fe11266b"
Oct 03 13:32:03 crc kubenswrapper[4861]: E1003 13:32:03.943259 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-5twn4_openshift-ovn-kubernetes(66ed4999-426b-4615-bfb3-764a3ecc950f)\"" pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" podUID="66ed4999-426b-4615-bfb3-764a3ecc950f"
Oct 03 13:32:03 crc kubenswrapper[4861]: I1003 13:32:03.944339 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hw4vl" event={"ID":"9671621a-5831-4fc4-8508-08b284d1cf88","Type":"ContainerStarted","Data":"53e87e91f53c3b754f21de857a8712b51063c580785de43d0c0e89f47185c549"}
Oct 03 13:32:03 crc kubenswrapper[4861]: I1003 13:32:03.945642 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log"
Oct 03 13:32:03 crc kubenswrapper[4861]: I1003 13:32:03.946809 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"cfcce0420b92a42aaae0ae0e6aa26b655cd97f6ce5d45b671bbf394217027023"}
Oct 03 13:32:03 crc kubenswrapper[4861]: I1003 13:32:03.947274 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc"
Oct 03 13:32:03 crc kubenswrapper[4861]: I1003 13:32:03.961066 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wm76s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3bf3157b-44d1-4bb3-b185-71523a80c054\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8fea8b7da57798521c13d0f35905e5311cd0d8016aa20c37cc0d73c8d6fbc1a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b41a8d9dd0f504f505f1d49f5b7fa39d025f0bc4c49b19ef61691a41bd162b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b41a8d9dd0f504f505f1d49f5b7fa39d025f0bc4c49b19ef61691a41bd162b0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad87b94fe313e12f4f857a69a0a01f6f34877ded4103f2de3015b588522d9904\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad87b94fe313e12f4f857a69a0a01f6f34877ded4103f2de3015b588522d9904\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4343077b8251a65be24e9269fa38bd48d628581ef65b43f7fa262286df7b677d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4343077b8251a65be24e9269fa38bd48d628581ef65b43f7fa262286df7b677d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d10915a62560e9a9e386d4029ccd1c6cd9c99209ab32653437c32a6e5cf7e98d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d10915a62560e9a9e386d4029ccd1c6cd9c99209ab32653437c32a6e5cf7e98d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cd99ac90e9cf617307233b5899d9da44b563bd5a0969e0a64c4073ee0122b63\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cd99ac90e9cf617307233b5899d9da44b563bd5a0969e0a64c4073ee0122b63\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b20088a595b5a59d1a0339827c7dd169c479a530aed875ac3eeb021b78269490\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b20088a595b5a59d1a0339827c7dd169c479a530aed875ac3eeb021b78269490\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wm76s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:03Z is after 2025-08-24T17:21:41Z"
Oct 03 13:32:03 crc kubenswrapper[4861]: I1003 13:32:03.969929 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-c97s6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e1217d91-8c47-4353-b363-96c9de2cdb56\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70a4ac69623752a83655a58cf44ef00fbf88b0321bc83721fbbe16ea746699c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6zdw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-c97s6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:03Z is after 2025-08-24T17:21:41Z"
Oct 03 13:32:03 crc kubenswrapper[4861]: I1003 13:32:03.982791 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-cft42" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"267128bb-f8b8-4d69-99a3-ba3af795218c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shz7z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shz7z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:32:02Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-cft42\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:03Z is after 2025-08-24T17:21:41Z"
Oct 03 13:32:03 crc kubenswrapper[4861]: I1003 13:32:03.989050 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:03 crc kubenswrapper[4861]: I1003 13:32:03.989081 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:03 crc kubenswrapper[4861]: I1003 13:32:03.989089 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:03 crc kubenswrapper[4861]: I1003 13:32:03.989103 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:03 crc kubenswrapper[4861]: I1003 13:32:03.989111 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:03Z","lastTransitionTime":"2025-10-03T13:32:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:03 crc kubenswrapper[4861]: I1003 13:32:03.997134 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"587ecce6-1ef4-4f74-a2ba-bd6e9fdb84dc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d77b51532e1ed4922634cbfc9360ac49276104c2c3ca115ea522ff423cd7bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://853fb69edcd3e4a27929ab2a6081c40f93553967619663805afb7b626f9c1e39\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75079f3e07d277ab11585e34fc72877ba93a8d0aeaa3f0c8bb214c7c14f9c1b1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd18d89b8b81faa6f8232c33ecaec19aafc3d9574a090cbef37bcf26f83a4bf9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cd18d89b8b81faa6f8232c33ecaec19aafc3d9574a090cbef37bcf26f83a4bf9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 13:31:47.746138 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 13:31:47.746280 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 13:31:47.747035 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2119377140/tls.crt::/tmp/serving-cert-2119377140/tls.key\\\\\\\"\\\\nI1003 13:31:48.538827 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 13:31:48.544908 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 13:31:48.544935 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 13:31:48.545220 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 13:31:48.545275 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 13:31:48.555911 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1003 13:31:48.555947 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 13:31:48.555953 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 13:31:48.555961 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 13:31:48.555964 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 13:31:48.555968 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 13:31:48.555971 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1003 13:31:48.556259 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1003 13:31:48.559989 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb1af1cb5b66706cd0a0da5a3f6b2c380a771100e61f84ca2c85c28f1878f7f6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bdf52752b6cec17ac3d33b9c94d08302ff9e7ff88ab9c23590134a36b1384af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bdf52752b6cec17ac3d33b9c94d08302ff9e7ff88ab9c23590134a36b1384af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:03Z is after 2025-08-24T17:21:41Z"
Oct 03 13:32:04 crc kubenswrapper[4861]: I1003 13:32:04.009643 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6d0e9cdc-883e-4b67-afb2-2ef5f4b3246d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01eb05b088e421c220145fd833922351aeba4a520944c6b707039785e26ef303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb9bcc84067a58db80e3c7e1b23825baeaff91f97351e9ada3765b6589fda35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80131782150ecbeb45ec2f55e86909b3735ec4f0b09e27e31f6dfc24d6d4ccd7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a40838be9fe69f9bebecff82c9f10b4c00e167b7f927682e6b18ff490bd10ad4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:04Z is after 2025-08-24T17:21:41Z"
Oct 03 13:32:04 crc kubenswrapper[4861]: I1003 13:32:04.018687 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-n974h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"25670d98-45f4-4308-9576-f6f532c422ec\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d9b2d8fb10bb6dc17ca3b4826e1e4b7e8e562e8c8745605cd332268197166b04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2cs7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:56Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-n974h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:04Z is after 2025-08-24T17:21:41Z"
Oct 03 13:32:04 crc kubenswrapper[4861]: I1003 13:32:04.029539 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ceeea9a8c61928b935a6c01f2dda3f9bf0036c2c2792c9338cc580a3296285b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:04Z is after 2025-08-24T17:21:41Z"
Oct 03 13:32:04 crc kubenswrapper[4861]: I1003 13:32:04.044292 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:04Z is after 2025-08-24T17:21:41Z"
Oct 03 13:32:04 crc kubenswrapper[4861]: I1003 13:32:04.057931 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://876e806fe7d7313a700bd557fe86fe469146eeb63ecd75684c558d04f6ef5862\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:04Z is after 2025-08-24T17:21:41Z"
Oct 03 13:32:04 crc kubenswrapper[4861]: I1003 13:32:04.075818 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66ed4999-426b-4615-bfb3-764a3ecc950f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a6dd78f8d0e99d19d90df2672bd0a66e48195ab147e3821b110c5b9b13fff935\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d7403a686a403bd13b8c7040a8d54e47ea882e532dbde51ff960cf2b4a7dc84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dadd381cc9fb8f216611723c7f3113272fdd37e424ab087ae2b516b1282c724\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c11d2168c2a8a146f93a9048c50a0a7da936f36039a924e2e0c946f571ac6d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://17d9e0e142062e93034c5f825e1229664112d38443d5843713cac6e077737c48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a49ed8048fd561e10cc87dfa9b39d3ff2123f2cc65f9b4402bba6bf01d161213\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://98cc8bd6dcdd5a486b40084c8a564a82868e13805b4c65ddaf39f5c3fe11266b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://98cc8bd6dcdd5a486b40084c8a564a82868e13805b4c65ddaf39f5c3fe11266b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T13:32:02Z\\\",\\\"message\\\":\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-console/downloads\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.213\\\\\\\", Port:80, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nF1003 13:32:01.716595 6227 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:01Z is after 2025-08-24T17:21:41Z]\\\\nI100\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T13:32:01Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-5twn4_openshift-ovn-kubernetes(66ed4999-426b-4615-bfb3-764a3ecc950f)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2ee45e3f91ddde95e7bdf26aed6afb1d69eb3dbbdad136c66a51a2a3a325984\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85920cc6705b475966ceb17c3776353ec6cd31730aab339b4e5469034c49264e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://85920cc6705b475966ceb17c3776353ec6cd31730aab339b4e5469034c49264e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5twn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:04Z is after 2025-08-24T17:21:41Z"
Oct 03 13:32:04 crc kubenswrapper[4861]: I1003 13:32:04.090112 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:04Z is after 2025-08-24T17:21:41Z"
Oct 03 13:32:04 crc kubenswrapper[4861]: I1003 13:32:04.090799 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:04 crc kubenswrapper[4861]: I1003 13:32:04.090829 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:04 crc kubenswrapper[4861]: I1003 13:32:04.090839 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:04 crc kubenswrapper[4861]: I1003 13:32:04.090855 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:04 crc kubenswrapper[4861]: I1003 13:32:04.090865 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:04Z","lastTransitionTime":"2025-10-03T13:32:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:04 crc kubenswrapper[4861]: I1003 13:32:04.100660 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d8335d3f-417e-4114-b306-a3d8f6c31348\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c79706d97ac0c9214aee8c49206bfb27e579a82781b63cf07bd7b9dc43077402\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7prvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://871a1c47b73846e3f28db33691e75b5ed73af7287e81dae4cf2134fd827614b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7prvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-t9slw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:04Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:04 crc kubenswrapper[4861]: I1003 13:32:04.110895 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hw4vl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9671621a-5831-4fc4-8508-08b284d1cf88\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:01Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:01Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sv2d7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sv2d7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:32:01Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hw4vl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed 
to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:04Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:04 crc kubenswrapper[4861]: I1003 13:32:04.122443 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:04Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:04 crc kubenswrapper[4861]: I1003 13:32:04.135920 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f330284d8d5446236d2bf739c6df75969c865e304c5adab6b1ec40a92baf30d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63b55320840755d0a9c8296d19550ae1d7f5cb2f17d286dddc10a0202963bd98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:04Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:04 crc kubenswrapper[4861]: I1003 13:32:04.147969 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jwgvx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f714b7db-082f-4c2c-8239-ba5df6986c13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://163f5cadc8f9cf8082434639e5dd0dfae5cefc359dbf462b616e4dde476a309f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b76qk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jwgvx\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:04Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:04 crc kubenswrapper[4861]: I1003 13:32:04.162974 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wm76s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3bf3157b-44d1-4bb3-b185-71523a80c054\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8fea8b7da57798521c13d0f35905e5311cd0d8016aa20c37cc0d73c8d6fbc1a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b41a8d9dd0f504f505f1d49f5b7fa39d025f0bc4c49b19ef61691a41bd162b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b41a8d9dd0f504f505f1d49f5b7fa39d025f0bc4c49b19ef61691a41bd162b0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad87b94fe313e12f4f857a69a0a01f6f34877ded4103f2de3015b588522d9904\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad87b94fe313e12f4f857a69a0a01f6f34877ded4103f2de3015b588522d9904\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4343077b8251a65be24e9269fa38bd48d628581ef65b43f7fa262286df7b677d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4343077b8251a65be24e9269fa38bd48d628581ef65b43f7fa262286df7b677d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d10915a62560e9a9e386d4029ccd1c6cd9c99209ab32653437c32a6e5cf7e98d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d10915a62560e9a9e386d4029ccd1c6cd9c99209ab32653437c32a6e5cf7e98d\\\",\\\"exitCode\\\":0,\\\
"finishedAt\\\":\\\"2025-10-03T13:31:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cd99ac90e9cf617307233b5899d9da44b563bd5a0969e0a64c4073ee0122b63\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cd99ac90e9cf617307233b5899d9da44b563bd5a0969e0a64c4073ee0122b63\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b20088a595b5a59d1a0339827c7dd169c479a530aed875ac3eeb021b78269490\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b20088a595b5a59d1a0339827c7dd169c479a530aed875ac3eeb021b78269490\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wm76s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-10-03T13:32:04Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:04 crc kubenswrapper[4861]: I1003 13:32:04.174364 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-c97s6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e1217d91-8c47-4353-b363-96c9de2cdb56\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70a4ac69623752a83655a58cf44ef00fbf88b0321bc83721fbbe16ea746699c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6zdw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-c97s6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:04Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:04 crc kubenswrapper[4861]: I1003 13:32:04.189608 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6d0e9cdc-883e-4b67-afb2-2ef5f4b3246d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01eb05b088e421c220145fd833922351aeba4a520944c6b707039785e26ef303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb9bcc84067a58db80e3c7e1b23825baeaff91f97351e9ada3765b6589fda35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80131782150ecbeb45ec2f55e86909b3735ec4f0b09e27e31f6dfc24d6d4ccd7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a40838be9fe69f9bebecff82c9f10b4c00e167b7f927682e6b18ff490bd10ad4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:04Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:04 crc kubenswrapper[4861]: I1003 13:32:04.193436 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:04 crc kubenswrapper[4861]: I1003 13:32:04.193477 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:04 crc kubenswrapper[4861]: I1003 13:32:04.193489 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:04 crc kubenswrapper[4861]: I1003 13:32:04.193506 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:04 crc kubenswrapper[4861]: I1003 13:32:04.193519 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:04Z","lastTransitionTime":"2025-10-03T13:32:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:04 crc kubenswrapper[4861]: I1003 13:32:04.202703 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-n974h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"25670d98-45f4-4308-9576-f6f532c422ec\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d9b2d8fb10bb6dc17ca3b4826e1e4b7e8e562e8c8745605cd332268197166b04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2cs7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:56Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-n974h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:04Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:04 crc kubenswrapper[4861]: I1003 13:32:04.214521 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-cft42" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"267128bb-f8b8-4d69-99a3-ba3af795218c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shz7z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shz7z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:32:02Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-cft42\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:04Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:04 crc kubenswrapper[4861]: I1003 13:32:04.216533 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:04 crc kubenswrapper[4861]: I1003 13:32:04.216656 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:04 crc kubenswrapper[4861]: I1003 13:32:04.216734 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Oct 03 13:32:04 crc kubenswrapper[4861]: I1003 13:32:04.216817 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:04 crc kubenswrapper[4861]: I1003 13:32:04.216895 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:04Z","lastTransitionTime":"2025-10-03T13:32:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:04 crc kubenswrapper[4861]: I1003 13:32:04.231816 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"587ecce6-1ef4-4f74-a2ba-bd6e9fdb84dc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d77b51532e1ed4922634cbfc9360ac49276104c2c3ca115ea522ff423cd7bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://853fb69edcd3e4a27929ab2a6081c40f93553967619663805afb7b626f9c1e39\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-
10-03T13:31:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75079f3e07d277ab11585e34fc72877ba93a8d0aeaa3f0c8bb214c7c14f9c1b1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfcce0420b92a42aaae0ae0e6aa26b655cd97f6ce5d45b671bbf394217027023\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cd18d89b8b81faa6f8232c33ecaec19aafc3d9574a090cbef37bcf26f83a4bf9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 13:31:47.746138 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 13:31:47.746280 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 13:31:47.747035 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2119377140/tls.crt::/tmp/serving-cert-2119377140/tls.key\\\\\\\"\\\\nI1003 13:31:48.538827 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 13:31:48.544908 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 13:31:48.544935 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 13:31:48.545220 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 13:31:48.545275 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 13:31:48.555911 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1003 13:31:48.555947 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 13:31:48.555953 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 13:31:48.555961 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 13:31:48.555964 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 13:31:48.555968 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 13:31:48.555971 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1003 13:31:48.556259 1 
genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1003 13:31:48.559989 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb1af1cb5b66706cd0a0da5a3f6b2c380a771100e61f84ca2c85c28f1878f7f6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bdf52752b6cec17ac3d33b9c94d08302ff9e7ff88ab9c23590134a36b1384af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bdf52752b6cec17ac3d33b9c94d08302ff9e7ff88ab9c23590134a36b1384af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:04Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:04 crc kubenswrapper[4861]: E1003 13:32:04.233997 4861 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status 
\"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:32:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:32:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:04Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:32:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:32:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:04Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae66
9\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-rel
ease-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9c733c76-1447-4e10-91a4-f1aaa7de6132\\\",\\\"systemUUID\\\":\\\"5c5136c5-33d2-4bef-9fd7-5251914e4451\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:04Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:04 crc kubenswrapper[4861]: I1003 13:32:04.237653 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:04 crc kubenswrapper[4861]: I1003 13:32:04.237686 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:04 crc kubenswrapper[4861]: I1003 13:32:04.237695 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:04 crc kubenswrapper[4861]: I1003 13:32:04.237709 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:04 crc kubenswrapper[4861]: I1003 13:32:04.237718 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:04Z","lastTransitionTime":"2025-10-03T13:32:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:04 crc kubenswrapper[4861]: I1003 13:32:04.247405 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:04Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:04 crc kubenswrapper[4861]: E1003 13:32:04.250434 4861 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:32:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:32:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:04Z\\\",\\\"message\\\":\\\"kubelet has no disk 
pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:32:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:32:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:04Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeByt
es\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-a
rt-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9c733c76-1447-4e10-91a4-f1aaa7de6132\\\",\\\"systemUUID\\\":\\\"5
c5136c5-33d2-4bef-9fd7-5251914e4451\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:04Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:04 crc kubenswrapper[4861]: I1003 13:32:04.254622 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:04 crc kubenswrapper[4861]: I1003 13:32:04.254666 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:04 crc kubenswrapper[4861]: I1003 13:32:04.254674 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:04 crc kubenswrapper[4861]: I1003 13:32:04.254687 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:04 crc kubenswrapper[4861]: I1003 13:32:04.254697 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:04Z","lastTransitionTime":"2025-10-03T13:32:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:04 crc kubenswrapper[4861]: I1003 13:32:04.260046 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ceeea9a8c61928b935a6c01f2dda3f9bf0036c2c2792c9338cc580a3296285b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:04Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:04 crc kubenswrapper[4861]: E1003 13:32:04.268103 4861 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:32:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:32:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:04Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:32:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:32:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:04Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9c733c76-1447-4e10-91a4-f1aaa7de6132\\\",\\\"systemUUID\\\":\\\"5c5136c5-33d2-4bef-9fd7-5251914e4451\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:04Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:04 crc kubenswrapper[4861]: I1003 13:32:04.271203 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:04 crc kubenswrapper[4861]: I1003 13:32:04.271257 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 03 13:32:04 crc kubenswrapper[4861]: I1003 13:32:04.271266 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:04 crc kubenswrapper[4861]: I1003 13:32:04.271281 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:04 crc kubenswrapper[4861]: I1003 13:32:04.271292 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:04Z","lastTransitionTime":"2025-10-03T13:32:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:04 crc kubenswrapper[4861]: I1003 13:32:04.271720 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:04Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:04 crc kubenswrapper[4861]: I1003 13:32:04.282814 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://876e806fe7d7313a700bd557fe86fe469146eeb63ecd75684c558d04f6ef5862\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:04Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:04 crc kubenswrapper[4861]: E1003 13:32:04.284724 4861 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status 
\"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:32:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:32:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:04Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:32:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:32:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:04Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae66
9\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-rel
ease-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9c733c76-1447-4e10-91a4-f1aaa7de6132\\\",\\\"systemUUID\\\":\\\"5c5136c5-33d2-4bef-9fd7-5251914e4451\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:04Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:04 crc kubenswrapper[4861]: I1003 13:32:04.289813 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:04 crc kubenswrapper[4861]: I1003 13:32:04.289871 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:04 crc kubenswrapper[4861]: I1003 13:32:04.289881 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:04 crc kubenswrapper[4861]: I1003 13:32:04.289895 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:04 crc kubenswrapper[4861]: I1003 13:32:04.289906 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:04Z","lastTransitionTime":"2025-10-03T13:32:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:04 crc kubenswrapper[4861]: E1003 13:32:04.302188 4861 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:32:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:32:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:04Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:32:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:32:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:04Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9c733c76-1447-4e10-91a4-f1aaa7de6132\\\",\\\"systemUUID\\\":\\\"5c5136c5-33d2-4bef-9fd7-5251914e4451\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:04Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:04 crc kubenswrapper[4861]: E1003 13:32:04.302337 4861 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 03 13:32:04 crc kubenswrapper[4861]: I1003 13:32:04.303671 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 03 13:32:04 crc kubenswrapper[4861]: I1003 13:32:04.303706 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:04 crc kubenswrapper[4861]: I1003 13:32:04.303720 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:04 crc kubenswrapper[4861]: I1003 13:32:04.303736 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:04 crc kubenswrapper[4861]: I1003 13:32:04.303748 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:04Z","lastTransitionTime":"2025-10-03T13:32:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:04 crc kubenswrapper[4861]: I1003 13:32:04.304602 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66ed4999-426b-4615-bfb3-764a3ecc950f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a6dd78f8d0e99d19d90df2672bd0a66e48195ab147e3821b110c5b9b13fff935\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d7403a686a403bd13b8c7040a8d54e47ea882e532dbde51ff960cf2b4a7dc84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dadd381cc9fb8f216611723c7f3113272fdd37e424ab087ae2b516b1282c724\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c11d2168c2a8a146f93a9048c50a0a7da936f36039a924e2e0c946f571ac6d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://17d9e0e142062e93034c5f825e1229664112d38443d5843713cac6e077737c48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a49ed8048fd561e10cc87dfa9b39d3ff2123f2cc65f9b4402bba6bf01d161213\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://98cc8bd6dcdd5a486b40084c8a564a82868e1380
5b4c65ddaf39f5c3fe11266b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://98cc8bd6dcdd5a486b40084c8a564a82868e13805b4c65ddaf39f5c3fe11266b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T13:32:02Z\\\",\\\"message\\\":\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-console/downloads\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.213\\\\\\\", Port:80, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nF1003 13:32:01.716595 6227 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:01Z is after 2025-08-24T17:21:41Z]\\\\nI100\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T13:32:01Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-5twn4_openshift-ovn-kubernetes(66ed4999-426b-4615-bfb3-764a3ecc950f)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2ee45e3f91ddde95e7bdf26aed6afb1d69eb3dbbdad136c66a51a2a3a325984\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85920cc6705b475966ceb17c3776353ec6cd31730aab339b4e5469034c49264e\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://85920cc6705b475966ceb17c3776353ec6cd31730aab339b4e5469034c49264e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5twn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:04Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:04 crc kubenswrapper[4861]: I1003 13:32:04.316255 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f330284d8d5446236d2bf739c6df75969c865e304c5adab6b1ec40a92baf30d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63b55320840755d0a9c8296d19550ae1d7f5cb2f17d286dddc10a0202963bd98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17
b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:04Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:04 crc kubenswrapper[4861]: I1003 13:32:04.327553 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jwgvx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f714b7db-082f-4c2c-8239-ba5df6986c13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://163f5cadc8f9cf8082434639e5dd0dfae5cefc359dbf462b616e4dde476a309f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cn
cf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b76qk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jwgvx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:04Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:04 crc kubenswrapper[4861]: I1003 13:32:04.338398 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d8335d3f-417e-4114-b306-a3d8f6c31348\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c79706d97ac0c9214aee8c49206bfb27e579a82781b63cf07bd7b9dc43077402\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7prvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://871a1c47b73846e3f28db33691e75b5ed73af7287e81dae4cf2134fd827614b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7prvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-t9slw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:04Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:04 crc kubenswrapper[4861]: I1003 13:32:04.348203 4861 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hw4vl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9671621a-5831-4fc4-8508-08b284d1cf88\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5cb2b6048f454a138caea8eca4a26c6e5cd219c9d124f46cfa69c168150b6ee4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:32:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sv2d7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53e87e91f53c3b754f21de857a8712b51063c580785de43d0c0e89f47185c549\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sv2d7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:32:01Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hw4vl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:04Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:04 crc kubenswrapper[4861]: I1003 13:32:04.361332 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:04Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:04 crc kubenswrapper[4861]: I1003 13:32:04.406317 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:04 crc kubenswrapper[4861]: I1003 13:32:04.406353 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:04 crc kubenswrapper[4861]: I1003 13:32:04.406363 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:04 crc kubenswrapper[4861]: I1003 13:32:04.406380 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:04 crc kubenswrapper[4861]: I1003 13:32:04.406391 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:04Z","lastTransitionTime":"2025-10-03T13:32:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false 
reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:04 crc kubenswrapper[4861]: I1003 13:32:04.456481 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/267128bb-f8b8-4d69-99a3-ba3af795218c-metrics-certs\") pod \"network-metrics-daemon-cft42\" (UID: \"267128bb-f8b8-4d69-99a3-ba3af795218c\") " pod="openshift-multus/network-metrics-daemon-cft42" Oct 03 13:32:04 crc kubenswrapper[4861]: E1003 13:32:04.456639 4861 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 03 13:32:04 crc kubenswrapper[4861]: E1003 13:32:04.456819 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/267128bb-f8b8-4d69-99a3-ba3af795218c-metrics-certs podName:267128bb-f8b8-4d69-99a3-ba3af795218c nodeName:}" failed. No retries permitted until 2025-10-03 13:32:06.456681909 +0000 UTC m=+40.454666976 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/267128bb-f8b8-4d69-99a3-ba3af795218c-metrics-certs") pod "network-metrics-daemon-cft42" (UID: "267128bb-f8b8-4d69-99a3-ba3af795218c") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 03 13:32:04 crc kubenswrapper[4861]: I1003 13:32:04.508976 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:04 crc kubenswrapper[4861]: I1003 13:32:04.509017 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:04 crc kubenswrapper[4861]: I1003 13:32:04.509029 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:04 crc kubenswrapper[4861]: I1003 13:32:04.509046 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:04 crc kubenswrapper[4861]: I1003 13:32:04.509058 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:04Z","lastTransitionTime":"2025-10-03T13:32:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:04 crc kubenswrapper[4861]: I1003 13:32:04.612300 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:04 crc kubenswrapper[4861]: I1003 13:32:04.612364 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:04 crc kubenswrapper[4861]: I1003 13:32:04.612381 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:04 crc kubenswrapper[4861]: I1003 13:32:04.612404 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:04 crc kubenswrapper[4861]: I1003 13:32:04.612423 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:04Z","lastTransitionTime":"2025-10-03T13:32:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:04 crc kubenswrapper[4861]: I1003 13:32:04.681140 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cft42" Oct 03 13:32:04 crc kubenswrapper[4861]: E1003 13:32:04.681424 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-cft42" podUID="267128bb-f8b8-4d69-99a3-ba3af795218c" Oct 03 13:32:04 crc kubenswrapper[4861]: I1003 13:32:04.714741 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:04 crc kubenswrapper[4861]: I1003 13:32:04.714783 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:04 crc kubenswrapper[4861]: I1003 13:32:04.714795 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:04 crc kubenswrapper[4861]: I1003 13:32:04.714811 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:04 crc kubenswrapper[4861]: I1003 13:32:04.714825 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:04Z","lastTransitionTime":"2025-10-03T13:32:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:04 crc kubenswrapper[4861]: I1003 13:32:04.817145 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:04 crc kubenswrapper[4861]: I1003 13:32:04.817185 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:04 crc kubenswrapper[4861]: I1003 13:32:04.817195 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:04 crc kubenswrapper[4861]: I1003 13:32:04.817208 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:04 crc kubenswrapper[4861]: I1003 13:32:04.817216 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:04Z","lastTransitionTime":"2025-10-03T13:32:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:04 crc kubenswrapper[4861]: I1003 13:32:04.919146 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:04 crc kubenswrapper[4861]: I1003 13:32:04.919195 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:04 crc kubenswrapper[4861]: I1003 13:32:04.919203 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:04 crc kubenswrapper[4861]: I1003 13:32:04.919216 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:04 crc kubenswrapper[4861]: I1003 13:32:04.919225 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:04Z","lastTransitionTime":"2025-10-03T13:32:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:05 crc kubenswrapper[4861]: I1003 13:32:05.021137 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:05 crc kubenswrapper[4861]: I1003 13:32:05.021175 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:05 crc kubenswrapper[4861]: I1003 13:32:05.021187 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:05 crc kubenswrapper[4861]: I1003 13:32:05.021201 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:05 crc kubenswrapper[4861]: I1003 13:32:05.021212 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:05Z","lastTransitionTime":"2025-10-03T13:32:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:05 crc kubenswrapper[4861]: I1003 13:32:05.124449 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:05 crc kubenswrapper[4861]: I1003 13:32:05.124521 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:05 crc kubenswrapper[4861]: I1003 13:32:05.124535 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:05 crc kubenswrapper[4861]: I1003 13:32:05.124565 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:05 crc kubenswrapper[4861]: I1003 13:32:05.124580 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:05Z","lastTransitionTime":"2025-10-03T13:32:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:05 crc kubenswrapper[4861]: I1003 13:32:05.227312 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:05 crc kubenswrapper[4861]: I1003 13:32:05.227348 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:05 crc kubenswrapper[4861]: I1003 13:32:05.227359 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:05 crc kubenswrapper[4861]: I1003 13:32:05.227375 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:05 crc kubenswrapper[4861]: I1003 13:32:05.227387 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:05Z","lastTransitionTime":"2025-10-03T13:32:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:05 crc kubenswrapper[4861]: I1003 13:32:05.329706 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:05 crc kubenswrapper[4861]: I1003 13:32:05.329775 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:05 crc kubenswrapper[4861]: I1003 13:32:05.329787 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:05 crc kubenswrapper[4861]: I1003 13:32:05.329806 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:05 crc kubenswrapper[4861]: I1003 13:32:05.329818 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:05Z","lastTransitionTime":"2025-10-03T13:32:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:05 crc kubenswrapper[4861]: I1003 13:32:05.433198 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:05 crc kubenswrapper[4861]: I1003 13:32:05.433255 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:05 crc kubenswrapper[4861]: I1003 13:32:05.433263 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:05 crc kubenswrapper[4861]: I1003 13:32:05.433275 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:05 crc kubenswrapper[4861]: I1003 13:32:05.433285 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:05Z","lastTransitionTime":"2025-10-03T13:32:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:05 crc kubenswrapper[4861]: I1003 13:32:05.536590 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:05 crc kubenswrapper[4861]: I1003 13:32:05.536637 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:05 crc kubenswrapper[4861]: I1003 13:32:05.536649 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:05 crc kubenswrapper[4861]: I1003 13:32:05.536662 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:05 crc kubenswrapper[4861]: I1003 13:32:05.536671 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:05Z","lastTransitionTime":"2025-10-03T13:32:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:05 crc kubenswrapper[4861]: I1003 13:32:05.640948 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:05 crc kubenswrapper[4861]: I1003 13:32:05.640992 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:05 crc kubenswrapper[4861]: I1003 13:32:05.641003 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:05 crc kubenswrapper[4861]: I1003 13:32:05.641020 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:05 crc kubenswrapper[4861]: I1003 13:32:05.641031 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:05Z","lastTransitionTime":"2025-10-03T13:32:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:05 crc kubenswrapper[4861]: I1003 13:32:05.681064 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 13:32:05 crc kubenswrapper[4861]: E1003 13:32:05.681205 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 13:32:05 crc kubenswrapper[4861]: I1003 13:32:05.681381 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 13:32:05 crc kubenswrapper[4861]: E1003 13:32:05.681501 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 13:32:05 crc kubenswrapper[4861]: I1003 13:32:05.681593 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 13:32:05 crc kubenswrapper[4861]: E1003 13:32:05.681675 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 13:32:05 crc kubenswrapper[4861]: I1003 13:32:05.743428 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:05 crc kubenswrapper[4861]: I1003 13:32:05.743487 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:05 crc kubenswrapper[4861]: I1003 13:32:05.743498 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:05 crc kubenswrapper[4861]: I1003 13:32:05.743517 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:05 crc kubenswrapper[4861]: I1003 13:32:05.743533 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:05Z","lastTransitionTime":"2025-10-03T13:32:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:05 crc kubenswrapper[4861]: I1003 13:32:05.846131 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:05 crc kubenswrapper[4861]: I1003 13:32:05.846417 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:05 crc kubenswrapper[4861]: I1003 13:32:05.846493 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:05 crc kubenswrapper[4861]: I1003 13:32:05.846566 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:05 crc kubenswrapper[4861]: I1003 13:32:05.846640 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:05Z","lastTransitionTime":"2025-10-03T13:32:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:05 crc kubenswrapper[4861]: I1003 13:32:05.948745 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:05 crc kubenswrapper[4861]: I1003 13:32:05.948797 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:05 crc kubenswrapper[4861]: I1003 13:32:05.948810 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:05 crc kubenswrapper[4861]: I1003 13:32:05.948828 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:05 crc kubenswrapper[4861]: I1003 13:32:05.948841 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:05Z","lastTransitionTime":"2025-10-03T13:32:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:06 crc kubenswrapper[4861]: I1003 13:32:06.051965 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:06 crc kubenswrapper[4861]: I1003 13:32:06.052003 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:06 crc kubenswrapper[4861]: I1003 13:32:06.052014 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:06 crc kubenswrapper[4861]: I1003 13:32:06.052028 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:06 crc kubenswrapper[4861]: I1003 13:32:06.052039 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:06Z","lastTransitionTime":"2025-10-03T13:32:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:06 crc kubenswrapper[4861]: I1003 13:32:06.154089 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:06 crc kubenswrapper[4861]: I1003 13:32:06.154124 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:06 crc kubenswrapper[4861]: I1003 13:32:06.154135 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:06 crc kubenswrapper[4861]: I1003 13:32:06.154150 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:06 crc kubenswrapper[4861]: I1003 13:32:06.154163 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:06Z","lastTransitionTime":"2025-10-03T13:32:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:06 crc kubenswrapper[4861]: I1003 13:32:06.256203 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:06 crc kubenswrapper[4861]: I1003 13:32:06.256245 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:06 crc kubenswrapper[4861]: I1003 13:32:06.256254 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:06 crc kubenswrapper[4861]: I1003 13:32:06.256266 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:06 crc kubenswrapper[4861]: I1003 13:32:06.256274 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:06Z","lastTransitionTime":"2025-10-03T13:32:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:06 crc kubenswrapper[4861]: I1003 13:32:06.358485 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:06 crc kubenswrapper[4861]: I1003 13:32:06.358799 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:06 crc kubenswrapper[4861]: I1003 13:32:06.358921 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:06 crc kubenswrapper[4861]: I1003 13:32:06.359010 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:06 crc kubenswrapper[4861]: I1003 13:32:06.359078 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:06Z","lastTransitionTime":"2025-10-03T13:32:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:06 crc kubenswrapper[4861]: I1003 13:32:06.461610 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:06 crc kubenswrapper[4861]: I1003 13:32:06.461638 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:06 crc kubenswrapper[4861]: I1003 13:32:06.461647 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:06 crc kubenswrapper[4861]: I1003 13:32:06.461661 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:06 crc kubenswrapper[4861]: I1003 13:32:06.461672 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:06Z","lastTransitionTime":"2025-10-03T13:32:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:06 crc kubenswrapper[4861]: I1003 13:32:06.471823 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/267128bb-f8b8-4d69-99a3-ba3af795218c-metrics-certs\") pod \"network-metrics-daemon-cft42\" (UID: \"267128bb-f8b8-4d69-99a3-ba3af795218c\") " pod="openshift-multus/network-metrics-daemon-cft42" Oct 03 13:32:06 crc kubenswrapper[4861]: E1003 13:32:06.471950 4861 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 03 13:32:06 crc kubenswrapper[4861]: E1003 13:32:06.472258 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/267128bb-f8b8-4d69-99a3-ba3af795218c-metrics-certs podName:267128bb-f8b8-4d69-99a3-ba3af795218c nodeName:}" failed. No retries permitted until 2025-10-03 13:32:10.472218973 +0000 UTC m=+44.470204030 (durationBeforeRetry 4s). 
Oct 03 13:32:06 crc kubenswrapper[4861]: I1003 13:32:06.563833 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:06 crc kubenswrapper[4861]: I1003 13:32:06.563862 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:06 crc kubenswrapper[4861]: I1003 13:32:06.563879 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:06 crc kubenswrapper[4861]: I1003 13:32:06.563898 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:06 crc kubenswrapper[4861]: I1003 13:32:06.563910 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:06Z","lastTransitionTime":"2025-10-03T13:32:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:06 crc kubenswrapper[4861]: I1003 13:32:06.666421 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:06 crc kubenswrapper[4861]: I1003 13:32:06.666646 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:06 crc kubenswrapper[4861]: I1003 13:32:06.666715 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:06 crc kubenswrapper[4861]: I1003 13:32:06.666824 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:06 crc kubenswrapper[4861]: I1003 13:32:06.666906 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:06Z","lastTransitionTime":"2025-10-03T13:32:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:06 crc kubenswrapper[4861]: I1003 13:32:06.679953 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cft42" Oct 03 13:32:06 crc kubenswrapper[4861]: E1003 13:32:06.680049 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-cft42" podUID="267128bb-f8b8-4d69-99a3-ba3af795218c" Oct 03 13:32:06 crc kubenswrapper[4861]: I1003 13:32:06.696158 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wm76s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3bf3157b-44d1-4bb3-b185-71523a80c054\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8fea8b7da57798521c13d0f35905e5311cd0d8016aa20c37cc0d73c8d6fbc1a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b41a8d9dd0f504f505f1d49f5b7fa39d025f0bc4c49b19ef61691a41bd162b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b41a8d9dd0f504f505f1d49f5b7fa39d025f0bc4c49b19ef61691a41bd162b0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad87
b94fe313e12f4f857a69a0a01f6f34877ded4103f2de3015b588522d9904\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad87b94fe313e12f4f857a69a0a01f6f34877ded4103f2de3015b588522d9904\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4343077b8251a65be24e9269fa38bd48d628581ef65b43f7fa262286df7b677d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4343077b8251a65be24e9269fa38bd48d628581ef65b43f7fa262286df7b677d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d10915a62560e9a9e386d4029ccd1c6cd9c99209ab32653437c32a6e5cf7e98d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d10915a62560e9a9e386d4029ccd1c6cd9c99209ab32653437c32a6e5cf7e98d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\
\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cd99ac90e9cf617307233b5899d9da44b563bd5a0969e0a64c4073ee0122b63\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cd99ac90e9cf617307233b5899d9da44b563bd5a0969e0a64c4073ee0122b63\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b20088a595b5a59d1a0339827c7dd169c479a530aed875ac3eeb021b78269490\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b20088a595b5a59d1a0339827c7dd169c479a530aed875ac3eeb021b78269490\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wm76s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:06Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:06 crc kubenswrapper[4861]: I1003 13:32:06.706552 4861 
status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-c97s6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e1217d91-8c47-4353-b363-96c9de2cdb56\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70a4ac69623752a83655a58cf44ef00fbf88b0321bc83721fbbe16ea746699c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6zdw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-c97s6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:06Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:06 crc kubenswrapper[4861]: I1003 13:32:06.719792 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"587ecce6-1ef4-4f74-a2ba-bd6e9fdb84dc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d77b51532e1ed4922634cbfc9360ac49276104c2c3ca115ea522ff423cd7bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://853fb69edcd3e4a27929ab2a6081c40f93553967619663805afb7b626f9c1e39\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75079f3e07d277ab11585e34fc72877ba93a8d0aeaa3f0c8bb214c7c14f9c1b1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfcce0420b92a42aaae0ae0e6aa26b655cd97f6ce5d45b671bbf394217027023\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://cd18d89b8b81faa6f8232c33ecaec19aafc3d9574a090cbef37bcf26f83a4bf9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 13:31:47.746138 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 13:31:47.746280 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 13:31:47.747035 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2119377140/tls.crt::/tmp/serving-cert-2119377140/tls.key\\\\\\\"\\\\nI1003 13:31:48.538827 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 13:31:48.544908 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 13:31:48.544935 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 13:31:48.545220 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 13:31:48.545275 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 13:31:48.555911 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1003 13:31:48.555947 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 13:31:48.555953 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 13:31:48.555961 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 13:31:48.555964 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 13:31:48.555968 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 13:31:48.555971 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1003 13:31:48.556259 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1003 13:31:48.559989 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb1af1cb5b66706cd0a0da5a3f6b2c380a771100e61f84ca2c85c28f1878f7f6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bdf52752b6cec17ac3d33b9c94d08302ff9e7ff88ab9c23590134a36b1384af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bdf52752b6cec17ac3d33b9c94d08302ff9e7ff88ab9c23590134a36b1384af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:06Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:06 crc kubenswrapper[4861]: I1003 13:32:06.731214 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6d0e9cdc-883e-4b67-afb2-2ef5f4b3246d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01eb05b088e421c220145fd833922351aeba4a520944c6b707039785e26ef303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb9bcc84067a58db80e3c7e1b23825baeaff91f97351e9ada3765b6589fda35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80131782150ecbeb45ec2f55e86909b3735ec4f0b09e27e31f6dfc24d6d4ccd7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a40838be9fe69f9bebecff82c9f10b4c00e167b7f927682e6b18ff490bd10ad4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:06Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:06 crc kubenswrapper[4861]: I1003 13:32:06.740928 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-n974h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"25670d98-45f4-4308-9576-f6f532c422ec\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d9b2d8fb10bb6dc17ca3b4826e1e4b7e8e562e8c8745605cd332268197166b04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2cs7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase
\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:56Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-n974h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:06Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:06 crc kubenswrapper[4861]: I1003 13:32:06.750215 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-cft42" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"267128bb-f8b8-4d69-99a3-ba3af795218c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shz7z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shz7z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:32:02Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-cft42\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:06Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:06 crc kubenswrapper[4861]: I1003 13:32:06.768514 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:06 crc kubenswrapper[4861]: I1003 13:32:06.768589 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:06 crc kubenswrapper[4861]: I1003 13:32:06.768599 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:06 crc kubenswrapper[4861]: I1003 13:32:06.768628 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:06 crc kubenswrapper[4861]: I1003 13:32:06.768638 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:06Z","lastTransitionTime":"2025-10-03T13:32:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:06 crc kubenswrapper[4861]: I1003 13:32:06.769277 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:06Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:06 crc kubenswrapper[4861]: I1003 13:32:06.784531 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://876e806fe7d7313a700bd557fe86fe469146eeb63ecd75684c558d04f6ef5862\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:06Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:06 crc kubenswrapper[4861]: I1003 13:32:06.800734 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" err="failed to 
patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66ed4999-426b-4615-bfb3-764a3ecc950f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a6dd78f8d0e99d19d90df2672bd0a66e48195ab147e3821b110c5b9b13fff935\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d7403a686a403bd13b8c7040a8d54e47ea882e532dbde51ff960cf2b4a7dc84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dadd381cc9fb8f216611723c7f3113272fdd37e424ab087ae2b516b1282c724\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\
"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c11d2168c2a8a146f93a9048c50a0a7da936f36039a924e2e0c946f571ac6d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://17d9e0e142062e93034c5f825e1229664112d38443d5843713cac6e077737c48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a49ed8048fd561e10cc87dfa9b39d3ff2123f2cc65f9b4402bba6bf01d161213\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started
\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://98cc8bd6dcdd5a486b40084c8a564a82868e13805b4c65ddaf39f5c3fe11266b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://98cc8bd6dcdd5a486b40084c8a564a82868e13805b4c65ddaf39f5c3fe11266b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T13:32:02Z\\\",\\\"message\\\":\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-console/downloads\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.213\\\\\\\", Port:80, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nF1003 13:32:01.716595 6227 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:01Z is after 2025-08-24T17:21:41Z]\\\\nI100\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T13:32:01Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-5twn4_openshift-ovn-kubernetes(66ed4999-426b-4615-bfb3-764a3ecc950f)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2ee45e3f91ddde95e7bdf26aed6afb1d69eb3dbbdad136c66a51a2a3a325984\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85920cc6705b475966ceb17c3776353ec6cd31730aab339b4e5469034c49264e\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://85920cc6705b475966ceb17c3776353ec6cd31730aab339b4e5469034c49264e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5twn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:06Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:06 crc kubenswrapper[4861]: I1003 13:32:06.812124 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:06Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:06 crc kubenswrapper[4861]: I1003 13:32:06.829492 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ceeea9a8c61928b935a6c01f2dda3f9bf0036c2c2792c9338cc580a3296285b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:06Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:06 crc kubenswrapper[4861]: I1003 13:32:06.839685 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hw4vl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9671621a-5831-4fc4-8508-08b284d1cf88\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5cb2b6048f454a138caea8eca4a26c6e5cd219c9d124f46cfa69c168150b6ee4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:32:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sv2d7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53e87e91f53c3b754f21de857a8712b51063c580785de43d0c0e89f47185c549\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sv2d7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:32:01Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hw4vl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:06Z is after 2025-08-24T17:21:41Z" Oct 03 
13:32:06 crc kubenswrapper[4861]: I1003 13:32:06.855861 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:06Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:06 crc kubenswrapper[4861]: I1003 13:32:06.869209 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f330284d8d5446236d2bf739c6df75969c865e304c5adab6b1ec40a92baf30d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63b55320840755d0a9c8296d19550ae1d7f5cb2f17d286dddc10a0202963bd98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:06Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:06 crc kubenswrapper[4861]: I1003 13:32:06.870467 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:06 crc kubenswrapper[4861]: I1003 13:32:06.870499 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:06 crc kubenswrapper[4861]: I1003 13:32:06.870510 4861 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Oct 03 13:32:06 crc kubenswrapper[4861]: I1003 13:32:06.870525 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:06 crc kubenswrapper[4861]: I1003 13:32:06.870537 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:06Z","lastTransitionTime":"2025-10-03T13:32:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:06 crc kubenswrapper[4861]: I1003 13:32:06.881592 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jwgvx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f714b7db-082f-4c2c-8239-ba5df6986c13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://163f5cadc8f9cf8082434639e5dd0dfae5cefc359dbf462b616e4dde476a309f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"
},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b76qk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jwgvx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:06Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:06 crc kubenswrapper[4861]: I1003 13:32:06.892173 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d8335d3f-417e-4114-b306-a3d8f6c31348\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c79706d97ac0c9214aee8c49206bfb27e579a82781b63cf07bd7b9dc43077402\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7prvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://871a1c47b73846e3f28db33691e75b5ed73af7287e81dae4cf2134fd827614b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7prvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-t9slw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:06Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:06 crc kubenswrapper[4861]: I1003 13:32:06.973581 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:06 crc kubenswrapper[4861]: I1003 13:32:06.973655 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:06 crc kubenswrapper[4861]: I1003 13:32:06.973669 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:06 crc kubenswrapper[4861]: I1003 13:32:06.973688 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:06 crc kubenswrapper[4861]: I1003 13:32:06.973727 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:06Z","lastTransitionTime":"2025-10-03T13:32:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:07 crc kubenswrapper[4861]: I1003 13:32:07.076799 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:07 crc kubenswrapper[4861]: I1003 13:32:07.077426 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:07 crc kubenswrapper[4861]: I1003 13:32:07.077520 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:07 crc kubenswrapper[4861]: I1003 13:32:07.077664 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:07 crc kubenswrapper[4861]: I1003 13:32:07.077779 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:07Z","lastTransitionTime":"2025-10-03T13:32:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:07 crc kubenswrapper[4861]: I1003 13:32:07.181160 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:07 crc kubenswrapper[4861]: I1003 13:32:07.181187 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:07 crc kubenswrapper[4861]: I1003 13:32:07.181196 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:07 crc kubenswrapper[4861]: I1003 13:32:07.181210 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:07 crc kubenswrapper[4861]: I1003 13:32:07.181218 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:07Z","lastTransitionTime":"2025-10-03T13:32:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:07 crc kubenswrapper[4861]: I1003 13:32:07.284818 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:07 crc kubenswrapper[4861]: I1003 13:32:07.284855 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:07 crc kubenswrapper[4861]: I1003 13:32:07.284881 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:07 crc kubenswrapper[4861]: I1003 13:32:07.284902 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:07 crc kubenswrapper[4861]: I1003 13:32:07.284914 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:07Z","lastTransitionTime":"2025-10-03T13:32:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:07 crc kubenswrapper[4861]: I1003 13:32:07.387474 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:07 crc kubenswrapper[4861]: I1003 13:32:07.387512 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:07 crc kubenswrapper[4861]: I1003 13:32:07.387521 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:07 crc kubenswrapper[4861]: I1003 13:32:07.387535 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:07 crc kubenswrapper[4861]: I1003 13:32:07.387547 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:07Z","lastTransitionTime":"2025-10-03T13:32:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:07 crc kubenswrapper[4861]: I1003 13:32:07.490834 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:07 crc kubenswrapper[4861]: I1003 13:32:07.490867 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:07 crc kubenswrapper[4861]: I1003 13:32:07.490875 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:07 crc kubenswrapper[4861]: I1003 13:32:07.490910 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:07 crc kubenswrapper[4861]: I1003 13:32:07.490920 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:07Z","lastTransitionTime":"2025-10-03T13:32:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:07 crc kubenswrapper[4861]: I1003 13:32:07.593567 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:07 crc kubenswrapper[4861]: I1003 13:32:07.593618 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:07 crc kubenswrapper[4861]: I1003 13:32:07.593627 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:07 crc kubenswrapper[4861]: I1003 13:32:07.593640 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:07 crc kubenswrapper[4861]: I1003 13:32:07.593649 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:07Z","lastTransitionTime":"2025-10-03T13:32:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:07 crc kubenswrapper[4861]: I1003 13:32:07.680556 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 13:32:07 crc kubenswrapper[4861]: I1003 13:32:07.680588 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 13:32:07 crc kubenswrapper[4861]: I1003 13:32:07.680642 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 13:32:07 crc kubenswrapper[4861]: E1003 13:32:07.680709 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 13:32:07 crc kubenswrapper[4861]: E1003 13:32:07.680816 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 13:32:07 crc kubenswrapper[4861]: E1003 13:32:07.680901 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 13:32:07 crc kubenswrapper[4861]: I1003 13:32:07.695994 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:07 crc kubenswrapper[4861]: I1003 13:32:07.696045 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:07 crc kubenswrapper[4861]: I1003 13:32:07.696054 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:07 crc kubenswrapper[4861]: I1003 13:32:07.696071 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:07 crc kubenswrapper[4861]: I1003 13:32:07.696080 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:07Z","lastTransitionTime":"2025-10-03T13:32:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:07 crc kubenswrapper[4861]: I1003 13:32:07.799313 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:07 crc kubenswrapper[4861]: I1003 13:32:07.799341 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:07 crc kubenswrapper[4861]: I1003 13:32:07.799349 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:07 crc kubenswrapper[4861]: I1003 13:32:07.799362 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:07 crc kubenswrapper[4861]: I1003 13:32:07.799371 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:07Z","lastTransitionTime":"2025-10-03T13:32:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:07 crc kubenswrapper[4861]: I1003 13:32:07.901662 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:07 crc kubenswrapper[4861]: I1003 13:32:07.901902 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:07 crc kubenswrapper[4861]: I1003 13:32:07.901965 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:07 crc kubenswrapper[4861]: I1003 13:32:07.902053 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:07 crc kubenswrapper[4861]: I1003 13:32:07.902119 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:07Z","lastTransitionTime":"2025-10-03T13:32:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:08 crc kubenswrapper[4861]: I1003 13:32:08.004478 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:08 crc kubenswrapper[4861]: I1003 13:32:08.004509 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:08 crc kubenswrapper[4861]: I1003 13:32:08.004517 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:08 crc kubenswrapper[4861]: I1003 13:32:08.004530 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:08 crc kubenswrapper[4861]: I1003 13:32:08.004539 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:08Z","lastTransitionTime":"2025-10-03T13:32:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:08 crc kubenswrapper[4861]: I1003 13:32:08.107613 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:08 crc kubenswrapper[4861]: I1003 13:32:08.107692 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:08 crc kubenswrapper[4861]: I1003 13:32:08.107729 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:08 crc kubenswrapper[4861]: I1003 13:32:08.107746 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:08 crc kubenswrapper[4861]: I1003 13:32:08.107758 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:08Z","lastTransitionTime":"2025-10-03T13:32:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:08 crc kubenswrapper[4861]: I1003 13:32:08.210749 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:08 crc kubenswrapper[4861]: I1003 13:32:08.210794 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:08 crc kubenswrapper[4861]: I1003 13:32:08.210805 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:08 crc kubenswrapper[4861]: I1003 13:32:08.210821 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:08 crc kubenswrapper[4861]: I1003 13:32:08.210832 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:08Z","lastTransitionTime":"2025-10-03T13:32:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:08 crc kubenswrapper[4861]: I1003 13:32:08.313084 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:08 crc kubenswrapper[4861]: I1003 13:32:08.313339 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:08 crc kubenswrapper[4861]: I1003 13:32:08.313462 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:08 crc kubenswrapper[4861]: I1003 13:32:08.313551 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:08 crc kubenswrapper[4861]: I1003 13:32:08.313641 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:08Z","lastTransitionTime":"2025-10-03T13:32:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:08 crc kubenswrapper[4861]: I1003 13:32:08.416773 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:08 crc kubenswrapper[4861]: I1003 13:32:08.416988 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:08 crc kubenswrapper[4861]: I1003 13:32:08.417070 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:08 crc kubenswrapper[4861]: I1003 13:32:08.417187 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:08 crc kubenswrapper[4861]: I1003 13:32:08.417295 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:08Z","lastTransitionTime":"2025-10-03T13:32:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:08 crc kubenswrapper[4861]: I1003 13:32:08.520553 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:08 crc kubenswrapper[4861]: I1003 13:32:08.520622 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:08 crc kubenswrapper[4861]: I1003 13:32:08.520635 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:08 crc kubenswrapper[4861]: I1003 13:32:08.520653 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:08 crc kubenswrapper[4861]: I1003 13:32:08.520666 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:08Z","lastTransitionTime":"2025-10-03T13:32:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:08 crc kubenswrapper[4861]: I1003 13:32:08.622973 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:08 crc kubenswrapper[4861]: I1003 13:32:08.623023 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:08 crc kubenswrapper[4861]: I1003 13:32:08.623046 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:08 crc kubenswrapper[4861]: I1003 13:32:08.623073 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:08 crc kubenswrapper[4861]: I1003 13:32:08.623104 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:08Z","lastTransitionTime":"2025-10-03T13:32:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:08 crc kubenswrapper[4861]: I1003 13:32:08.681051 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cft42" Oct 03 13:32:08 crc kubenswrapper[4861]: E1003 13:32:08.681186 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-cft42" podUID="267128bb-f8b8-4d69-99a3-ba3af795218c" Oct 03 13:32:08 crc kubenswrapper[4861]: I1003 13:32:08.726009 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:08 crc kubenswrapper[4861]: I1003 13:32:08.726058 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:08 crc kubenswrapper[4861]: I1003 13:32:08.726071 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:08 crc kubenswrapper[4861]: I1003 13:32:08.726090 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:08 crc kubenswrapper[4861]: I1003 13:32:08.726105 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:08Z","lastTransitionTime":"2025-10-03T13:32:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:08 crc kubenswrapper[4861]: I1003 13:32:08.828571 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:08 crc kubenswrapper[4861]: I1003 13:32:08.828805 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:08 crc kubenswrapper[4861]: I1003 13:32:08.828994 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:08 crc kubenswrapper[4861]: I1003 13:32:08.829064 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:08 crc kubenswrapper[4861]: I1003 13:32:08.829117 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:08Z","lastTransitionTime":"2025-10-03T13:32:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:08 crc kubenswrapper[4861]: I1003 13:32:08.931281 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:08 crc kubenswrapper[4861]: I1003 13:32:08.931319 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:08 crc kubenswrapper[4861]: I1003 13:32:08.931339 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:08 crc kubenswrapper[4861]: I1003 13:32:08.931356 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:08 crc kubenswrapper[4861]: I1003 13:32:08.931367 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:08Z","lastTransitionTime":"2025-10-03T13:32:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:09 crc kubenswrapper[4861]: I1003 13:32:09.034662 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:09 crc kubenswrapper[4861]: I1003 13:32:09.034691 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:09 crc kubenswrapper[4861]: I1003 13:32:09.034700 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:09 crc kubenswrapper[4861]: I1003 13:32:09.034712 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:09 crc kubenswrapper[4861]: I1003 13:32:09.034723 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:09Z","lastTransitionTime":"2025-10-03T13:32:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:09 crc kubenswrapper[4861]: I1003 13:32:09.137468 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:09 crc kubenswrapper[4861]: I1003 13:32:09.137507 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:09 crc kubenswrapper[4861]: I1003 13:32:09.137516 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:09 crc kubenswrapper[4861]: I1003 13:32:09.137528 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:09 crc kubenswrapper[4861]: I1003 13:32:09.137536 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:09Z","lastTransitionTime":"2025-10-03T13:32:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:09 crc kubenswrapper[4861]: I1003 13:32:09.240290 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:09 crc kubenswrapper[4861]: I1003 13:32:09.240332 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:09 crc kubenswrapper[4861]: I1003 13:32:09.240342 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:09 crc kubenswrapper[4861]: I1003 13:32:09.240356 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:09 crc kubenswrapper[4861]: I1003 13:32:09.240365 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:09Z","lastTransitionTime":"2025-10-03T13:32:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:09 crc kubenswrapper[4861]: I1003 13:32:09.342190 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:09 crc kubenswrapper[4861]: I1003 13:32:09.342248 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:09 crc kubenswrapper[4861]: I1003 13:32:09.342266 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:09 crc kubenswrapper[4861]: I1003 13:32:09.342305 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:09 crc kubenswrapper[4861]: I1003 13:32:09.342318 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:09Z","lastTransitionTime":"2025-10-03T13:32:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:09 crc kubenswrapper[4861]: I1003 13:32:09.445469 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:09 crc kubenswrapper[4861]: I1003 13:32:09.445504 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:09 crc kubenswrapper[4861]: I1003 13:32:09.445514 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:09 crc kubenswrapper[4861]: I1003 13:32:09.445529 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:09 crc kubenswrapper[4861]: I1003 13:32:09.445540 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:09Z","lastTransitionTime":"2025-10-03T13:32:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:09 crc kubenswrapper[4861]: I1003 13:32:09.548640 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:09 crc kubenswrapper[4861]: I1003 13:32:09.548935 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:09 crc kubenswrapper[4861]: I1003 13:32:09.548949 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:09 crc kubenswrapper[4861]: I1003 13:32:09.548962 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:09 crc kubenswrapper[4861]: I1003 13:32:09.548978 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:09Z","lastTransitionTime":"2025-10-03T13:32:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:09 crc kubenswrapper[4861]: I1003 13:32:09.653164 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:09 crc kubenswrapper[4861]: I1003 13:32:09.653508 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:09 crc kubenswrapper[4861]: I1003 13:32:09.653587 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:09 crc kubenswrapper[4861]: I1003 13:32:09.653677 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:09 crc kubenswrapper[4861]: I1003 13:32:09.653755 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:09Z","lastTransitionTime":"2025-10-03T13:32:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:09 crc kubenswrapper[4861]: I1003 13:32:09.680424 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 13:32:09 crc kubenswrapper[4861]: E1003 13:32:09.680538 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 13:32:09 crc kubenswrapper[4861]: I1003 13:32:09.680565 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 13:32:09 crc kubenswrapper[4861]: E1003 13:32:09.680699 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 13:32:09 crc kubenswrapper[4861]: I1003 13:32:09.680852 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 13:32:09 crc kubenswrapper[4861]: E1003 13:32:09.680994 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 13:32:09 crc kubenswrapper[4861]: I1003 13:32:09.755983 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:09 crc kubenswrapper[4861]: I1003 13:32:09.756018 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:09 crc kubenswrapper[4861]: I1003 13:32:09.756027 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:09 crc kubenswrapper[4861]: I1003 13:32:09.756041 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:09 crc kubenswrapper[4861]: I1003 13:32:09.756052 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:09Z","lastTransitionTime":"2025-10-03T13:32:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:09 crc kubenswrapper[4861]: I1003 13:32:09.859350 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:09 crc kubenswrapper[4861]: I1003 13:32:09.859955 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:09 crc kubenswrapper[4861]: I1003 13:32:09.860064 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:09 crc kubenswrapper[4861]: I1003 13:32:09.860156 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:09 crc kubenswrapper[4861]: I1003 13:32:09.860268 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:09Z","lastTransitionTime":"2025-10-03T13:32:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:09 crc kubenswrapper[4861]: I1003 13:32:09.962981 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:09 crc kubenswrapper[4861]: I1003 13:32:09.963006 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:09 crc kubenswrapper[4861]: I1003 13:32:09.963013 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:09 crc kubenswrapper[4861]: I1003 13:32:09.963026 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:09 crc kubenswrapper[4861]: I1003 13:32:09.963071 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:09Z","lastTransitionTime":"2025-10-03T13:32:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:10 crc kubenswrapper[4861]: I1003 13:32:10.065747 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:10 crc kubenswrapper[4861]: I1003 13:32:10.065959 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:10 crc kubenswrapper[4861]: I1003 13:32:10.066052 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:10 crc kubenswrapper[4861]: I1003 13:32:10.066175 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:10 crc kubenswrapper[4861]: I1003 13:32:10.066274 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:10Z","lastTransitionTime":"2025-10-03T13:32:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:10 crc kubenswrapper[4861]: I1003 13:32:10.168657 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:10 crc kubenswrapper[4861]: I1003 13:32:10.168731 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:10 crc kubenswrapper[4861]: I1003 13:32:10.168744 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:10 crc kubenswrapper[4861]: I1003 13:32:10.168757 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:10 crc kubenswrapper[4861]: I1003 13:32:10.168767 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:10Z","lastTransitionTime":"2025-10-03T13:32:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:10 crc kubenswrapper[4861]: I1003 13:32:10.270716 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:10 crc kubenswrapper[4861]: I1003 13:32:10.270766 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:10 crc kubenswrapper[4861]: I1003 13:32:10.270777 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:10 crc kubenswrapper[4861]: I1003 13:32:10.270793 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:10 crc kubenswrapper[4861]: I1003 13:32:10.270805 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:10Z","lastTransitionTime":"2025-10-03T13:32:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:10 crc kubenswrapper[4861]: I1003 13:32:10.372582 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:10 crc kubenswrapper[4861]: I1003 13:32:10.372657 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:10 crc kubenswrapper[4861]: I1003 13:32:10.372678 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:10 crc kubenswrapper[4861]: I1003 13:32:10.372704 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:10 crc kubenswrapper[4861]: I1003 13:32:10.372722 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:10Z","lastTransitionTime":"2025-10-03T13:32:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:10 crc kubenswrapper[4861]: I1003 13:32:10.474411 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:10 crc kubenswrapper[4861]: I1003 13:32:10.474451 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:10 crc kubenswrapper[4861]: I1003 13:32:10.474459 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:10 crc kubenswrapper[4861]: I1003 13:32:10.474472 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:10 crc kubenswrapper[4861]: I1003 13:32:10.474489 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:10Z","lastTransitionTime":"2025-10-03T13:32:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:10 crc kubenswrapper[4861]: I1003 13:32:10.511549 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/267128bb-f8b8-4d69-99a3-ba3af795218c-metrics-certs\") pod \"network-metrics-daemon-cft42\" (UID: \"267128bb-f8b8-4d69-99a3-ba3af795218c\") " pod="openshift-multus/network-metrics-daemon-cft42" Oct 03 13:32:10 crc kubenswrapper[4861]: E1003 13:32:10.511761 4861 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 03 13:32:10 crc kubenswrapper[4861]: E1003 13:32:10.511840 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/267128bb-f8b8-4d69-99a3-ba3af795218c-metrics-certs podName:267128bb-f8b8-4d69-99a3-ba3af795218c nodeName:}" failed. No retries permitted until 2025-10-03 13:32:18.511821598 +0000 UTC m=+52.509806645 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/267128bb-f8b8-4d69-99a3-ba3af795218c-metrics-certs") pod "network-metrics-daemon-cft42" (UID: "267128bb-f8b8-4d69-99a3-ba3af795218c") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 03 13:32:10 crc kubenswrapper[4861]: I1003 13:32:10.577211 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:10 crc kubenswrapper[4861]: I1003 13:32:10.577264 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:10 crc kubenswrapper[4861]: I1003 13:32:10.577274 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:10 crc kubenswrapper[4861]: I1003 13:32:10.577291 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:10 crc kubenswrapper[4861]: I1003 13:32:10.577301 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:10Z","lastTransitionTime":"2025-10-03T13:32:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:10 crc kubenswrapper[4861]: I1003 13:32:10.679304 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:10 crc kubenswrapper[4861]: I1003 13:32:10.679389 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:10 crc kubenswrapper[4861]: I1003 13:32:10.679404 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:10 crc kubenswrapper[4861]: I1003 13:32:10.679426 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:10 crc kubenswrapper[4861]: I1003 13:32:10.679440 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:10Z","lastTransitionTime":"2025-10-03T13:32:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:10 crc kubenswrapper[4861]: I1003 13:32:10.680463 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cft42" Oct 03 13:32:10 crc kubenswrapper[4861]: E1003 13:32:10.680577 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-cft42" podUID="267128bb-f8b8-4d69-99a3-ba3af795218c" Oct 03 13:32:10 crc kubenswrapper[4861]: I1003 13:32:10.781798 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:10 crc kubenswrapper[4861]: I1003 13:32:10.781833 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:10 crc kubenswrapper[4861]: I1003 13:32:10.781844 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:10 crc kubenswrapper[4861]: I1003 13:32:10.781859 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:10 crc kubenswrapper[4861]: I1003 13:32:10.781869 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:10Z","lastTransitionTime":"2025-10-03T13:32:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:10 crc kubenswrapper[4861]: I1003 13:32:10.884425 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:10 crc kubenswrapper[4861]: I1003 13:32:10.884475 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:10 crc kubenswrapper[4861]: I1003 13:32:10.884488 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:10 crc kubenswrapper[4861]: I1003 13:32:10.884511 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:10 crc kubenswrapper[4861]: I1003 13:32:10.884524 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:10Z","lastTransitionTime":"2025-10-03T13:32:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:10 crc kubenswrapper[4861]: I1003 13:32:10.987148 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:10 crc kubenswrapper[4861]: I1003 13:32:10.987430 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:10 crc kubenswrapper[4861]: I1003 13:32:10.987544 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:10 crc kubenswrapper[4861]: I1003 13:32:10.987611 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:10 crc kubenswrapper[4861]: I1003 13:32:10.987700 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:10Z","lastTransitionTime":"2025-10-03T13:32:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:11 crc kubenswrapper[4861]: I1003 13:32:11.090151 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:11 crc kubenswrapper[4861]: I1003 13:32:11.090526 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:11 crc kubenswrapper[4861]: I1003 13:32:11.090598 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:11 crc kubenswrapper[4861]: I1003 13:32:11.090661 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:11 crc kubenswrapper[4861]: I1003 13:32:11.090730 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:11Z","lastTransitionTime":"2025-10-03T13:32:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:11 crc kubenswrapper[4861]: I1003 13:32:11.194172 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:11 crc kubenswrapper[4861]: I1003 13:32:11.195055 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:11 crc kubenswrapper[4861]: I1003 13:32:11.195186 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:11 crc kubenswrapper[4861]: I1003 13:32:11.195356 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:11 crc kubenswrapper[4861]: I1003 13:32:11.195478 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:11Z","lastTransitionTime":"2025-10-03T13:32:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:11 crc kubenswrapper[4861]: I1003 13:32:11.298413 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:11 crc kubenswrapper[4861]: I1003 13:32:11.298484 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:11 crc kubenswrapper[4861]: I1003 13:32:11.298508 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:11 crc kubenswrapper[4861]: I1003 13:32:11.298542 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:11 crc kubenswrapper[4861]: I1003 13:32:11.298566 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:11Z","lastTransitionTime":"2025-10-03T13:32:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:11 crc kubenswrapper[4861]: I1003 13:32:11.400427 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:11 crc kubenswrapper[4861]: I1003 13:32:11.400689 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:11 crc kubenswrapper[4861]: I1003 13:32:11.400766 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:11 crc kubenswrapper[4861]: I1003 13:32:11.400857 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:11 crc kubenswrapper[4861]: I1003 13:32:11.400927 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:11Z","lastTransitionTime":"2025-10-03T13:32:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:11 crc kubenswrapper[4861]: I1003 13:32:11.503680 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:11 crc kubenswrapper[4861]: I1003 13:32:11.503956 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:11 crc kubenswrapper[4861]: I1003 13:32:11.504100 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:11 crc kubenswrapper[4861]: I1003 13:32:11.504204 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:11 crc kubenswrapper[4861]: I1003 13:32:11.504348 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:11Z","lastTransitionTime":"2025-10-03T13:32:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:11 crc kubenswrapper[4861]: I1003 13:32:11.607077 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:11 crc kubenswrapper[4861]: I1003 13:32:11.607134 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:11 crc kubenswrapper[4861]: I1003 13:32:11.607148 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:11 crc kubenswrapper[4861]: I1003 13:32:11.607170 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:11 crc kubenswrapper[4861]: I1003 13:32:11.607187 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:11Z","lastTransitionTime":"2025-10-03T13:32:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:11 crc kubenswrapper[4861]: I1003 13:32:11.680705 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 13:32:11 crc kubenswrapper[4861]: I1003 13:32:11.680761 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 13:32:11 crc kubenswrapper[4861]: I1003 13:32:11.680832 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 13:32:11 crc kubenswrapper[4861]: E1003 13:32:11.680946 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 13:32:11 crc kubenswrapper[4861]: E1003 13:32:11.681109 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 13:32:11 crc kubenswrapper[4861]: E1003 13:32:11.681289 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 13:32:11 crc kubenswrapper[4861]: I1003 13:32:11.709908 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:11 crc kubenswrapper[4861]: I1003 13:32:11.709936 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:11 crc kubenswrapper[4861]: I1003 13:32:11.709944 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:11 crc kubenswrapper[4861]: I1003 13:32:11.709957 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:11 crc kubenswrapper[4861]: I1003 13:32:11.709966 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:11Z","lastTransitionTime":"2025-10-03T13:32:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:11 crc kubenswrapper[4861]: I1003 13:32:11.812186 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:11 crc kubenswrapper[4861]: I1003 13:32:11.812215 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:11 crc kubenswrapper[4861]: I1003 13:32:11.812222 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:11 crc kubenswrapper[4861]: I1003 13:32:11.812254 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:11 crc kubenswrapper[4861]: I1003 13:32:11.812262 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:11Z","lastTransitionTime":"2025-10-03T13:32:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:11 crc kubenswrapper[4861]: I1003 13:32:11.914007 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:11 crc kubenswrapper[4861]: I1003 13:32:11.914042 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:11 crc kubenswrapper[4861]: I1003 13:32:11.914051 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:11 crc kubenswrapper[4861]: I1003 13:32:11.914064 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:11 crc kubenswrapper[4861]: I1003 13:32:11.914073 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:11Z","lastTransitionTime":"2025-10-03T13:32:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:12 crc kubenswrapper[4861]: I1003 13:32:12.016415 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:12 crc kubenswrapper[4861]: I1003 13:32:12.016687 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:12 crc kubenswrapper[4861]: I1003 13:32:12.016769 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:12 crc kubenswrapper[4861]: I1003 13:32:12.016846 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:12 crc kubenswrapper[4861]: I1003 13:32:12.016904 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:12Z","lastTransitionTime":"2025-10-03T13:32:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:12 crc kubenswrapper[4861]: I1003 13:32:12.119316 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:12 crc kubenswrapper[4861]: I1003 13:32:12.119348 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:12 crc kubenswrapper[4861]: I1003 13:32:12.119361 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:12 crc kubenswrapper[4861]: I1003 13:32:12.119377 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:12 crc kubenswrapper[4861]: I1003 13:32:12.119388 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:12Z","lastTransitionTime":"2025-10-03T13:32:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:12 crc kubenswrapper[4861]: I1003 13:32:12.222578 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:12 crc kubenswrapper[4861]: I1003 13:32:12.223408 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:12 crc kubenswrapper[4861]: I1003 13:32:12.223451 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:12 crc kubenswrapper[4861]: I1003 13:32:12.223484 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:12 crc kubenswrapper[4861]: I1003 13:32:12.223508 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:12Z","lastTransitionTime":"2025-10-03T13:32:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:12 crc kubenswrapper[4861]: I1003 13:32:12.325535 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:12 crc kubenswrapper[4861]: I1003 13:32:12.325571 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:12 crc kubenswrapper[4861]: I1003 13:32:12.325581 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:12 crc kubenswrapper[4861]: I1003 13:32:12.325595 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:12 crc kubenswrapper[4861]: I1003 13:32:12.325607 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:12Z","lastTransitionTime":"2025-10-03T13:32:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:12 crc kubenswrapper[4861]: I1003 13:32:12.428563 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:12 crc kubenswrapper[4861]: I1003 13:32:12.428614 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:12 crc kubenswrapper[4861]: I1003 13:32:12.428624 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:12 crc kubenswrapper[4861]: I1003 13:32:12.428638 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:12 crc kubenswrapper[4861]: I1003 13:32:12.428647 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:12Z","lastTransitionTime":"2025-10-03T13:32:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Oct 03 13:32:12 crc kubenswrapper[4861]: I1003 13:32:12.531818 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:12 crc kubenswrapper[4861]: I1003 13:32:12.531863 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:12 crc kubenswrapper[4861]: I1003 13:32:12.531875 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:12 crc kubenswrapper[4861]: I1003 13:32:12.531891 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:12 crc kubenswrapper[4861]: I1003 13:32:12.531905 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:12Z","lastTransitionTime":"2025-10-03T13:32:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:12 crc kubenswrapper[4861]: I1003 13:32:12.634706 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:12 crc kubenswrapper[4861]: I1003 13:32:12.634749 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:12 crc kubenswrapper[4861]: I1003 13:32:12.634791 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:12 crc kubenswrapper[4861]: I1003 13:32:12.634810 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:12 crc kubenswrapper[4861]: I1003 13:32:12.634822 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:12Z","lastTransitionTime":"2025-10-03T13:32:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:12 crc kubenswrapper[4861]: I1003 13:32:12.680369 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cft42"
Oct 03 13:32:12 crc kubenswrapper[4861]: E1003 13:32:12.680504 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-cft42" podUID="267128bb-f8b8-4d69-99a3-ba3af795218c"
Oct 03 13:32:12 crc kubenswrapper[4861]: I1003 13:32:12.737084 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:12 crc kubenswrapper[4861]: I1003 13:32:12.737128 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:12 crc kubenswrapper[4861]: I1003 13:32:12.737140 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:12 crc kubenswrapper[4861]: I1003 13:32:12.737158 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:12 crc kubenswrapper[4861]: I1003 13:32:12.737170 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:12Z","lastTransitionTime":"2025-10-03T13:32:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:12 crc kubenswrapper[4861]: I1003 13:32:12.839448 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:12 crc kubenswrapper[4861]: I1003 13:32:12.839482 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:12 crc kubenswrapper[4861]: I1003 13:32:12.839492 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:12 crc kubenswrapper[4861]: I1003 13:32:12.839510 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:12 crc kubenswrapper[4861]: I1003 13:32:12.839521 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:12Z","lastTransitionTime":"2025-10-03T13:32:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:12 crc kubenswrapper[4861]: I1003 13:32:12.942150 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:12 crc kubenswrapper[4861]: I1003 13:32:12.942208 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:12 crc kubenswrapper[4861]: I1003 13:32:12.942262 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:12 crc kubenswrapper[4861]: I1003 13:32:12.942286 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:12 crc kubenswrapper[4861]: I1003 13:32:12.942306 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:12Z","lastTransitionTime":"2025-10-03T13:32:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:13 crc kubenswrapper[4861]: I1003 13:32:13.045251 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:13 crc kubenswrapper[4861]: I1003 13:32:13.045296 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:13 crc kubenswrapper[4861]: I1003 13:32:13.045305 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:13 crc kubenswrapper[4861]: I1003 13:32:13.045321 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:13 crc kubenswrapper[4861]: I1003 13:32:13.045333 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:13Z","lastTransitionTime":"2025-10-03T13:32:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:13 crc kubenswrapper[4861]: I1003 13:32:13.148918 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:13 crc kubenswrapper[4861]: I1003 13:32:13.148968 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:13 crc kubenswrapper[4861]: I1003 13:32:13.148980 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:13 crc kubenswrapper[4861]: I1003 13:32:13.148998 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:13 crc kubenswrapper[4861]: I1003 13:32:13.149008 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:13Z","lastTransitionTime":"2025-10-03T13:32:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:13 crc kubenswrapper[4861]: I1003 13:32:13.251924 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:13 crc kubenswrapper[4861]: I1003 13:32:13.251966 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:13 crc kubenswrapper[4861]: I1003 13:32:13.251974 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:13 crc kubenswrapper[4861]: I1003 13:32:13.251988 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:13 crc kubenswrapper[4861]: I1003 13:32:13.251999 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:13Z","lastTransitionTime":"2025-10-03T13:32:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:13 crc kubenswrapper[4861]: I1003 13:32:13.355023 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:13 crc kubenswrapper[4861]: I1003 13:32:13.355057 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:13 crc kubenswrapper[4861]: I1003 13:32:13.355068 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:13 crc kubenswrapper[4861]: I1003 13:32:13.355085 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:13 crc kubenswrapper[4861]: I1003 13:32:13.355099 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:13Z","lastTransitionTime":"2025-10-03T13:32:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:13 crc kubenswrapper[4861]: I1003 13:32:13.457133 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:13 crc kubenswrapper[4861]: I1003 13:32:13.457174 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:13 crc kubenswrapper[4861]: I1003 13:32:13.457182 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:13 crc kubenswrapper[4861]: I1003 13:32:13.457196 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:13 crc kubenswrapper[4861]: I1003 13:32:13.457205 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:13Z","lastTransitionTime":"2025-10-03T13:32:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:13 crc kubenswrapper[4861]: I1003 13:32:13.559219 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:13 crc kubenswrapper[4861]: I1003 13:32:13.559547 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:13 crc kubenswrapper[4861]: I1003 13:32:13.559682 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:13 crc kubenswrapper[4861]: I1003 13:32:13.559807 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:13 crc kubenswrapper[4861]: I1003 13:32:13.559903 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:13Z","lastTransitionTime":"2025-10-03T13:32:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:13 crc kubenswrapper[4861]: I1003 13:32:13.662428 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:13 crc kubenswrapper[4861]: I1003 13:32:13.662483 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:13 crc kubenswrapper[4861]: I1003 13:32:13.662494 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:13 crc kubenswrapper[4861]: I1003 13:32:13.662512 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:13 crc kubenswrapper[4861]: I1003 13:32:13.662528 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:13Z","lastTransitionTime":"2025-10-03T13:32:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:13 crc kubenswrapper[4861]: I1003 13:32:13.680757 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 03 13:32:13 crc kubenswrapper[4861]: I1003 13:32:13.680762 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 03 13:32:13 crc kubenswrapper[4861]: E1003 13:32:13.680997 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 03 13:32:13 crc kubenswrapper[4861]: I1003 13:32:13.680783 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 03 13:32:13 crc kubenswrapper[4861]: E1003 13:32:13.680899 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 03 13:32:13 crc kubenswrapper[4861]: E1003 13:32:13.681251 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 03 13:32:13 crc kubenswrapper[4861]: I1003 13:32:13.764625 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:13 crc kubenswrapper[4861]: I1003 13:32:13.764660 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:13 crc kubenswrapper[4861]: I1003 13:32:13.764670 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:13 crc kubenswrapper[4861]: I1003 13:32:13.764684 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:13 crc kubenswrapper[4861]: I1003 13:32:13.764693 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:13Z","lastTransitionTime":"2025-10-03T13:32:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:13 crc kubenswrapper[4861]: I1003 13:32:13.867569 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:13 crc kubenswrapper[4861]: I1003 13:32:13.867914 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:13 crc kubenswrapper[4861]: I1003 13:32:13.867925 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:13 crc kubenswrapper[4861]: I1003 13:32:13.867943 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:13 crc kubenswrapper[4861]: I1003 13:32:13.867953 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:13Z","lastTransitionTime":"2025-10-03T13:32:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:13 crc kubenswrapper[4861]: I1003 13:32:13.971501 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:13 crc kubenswrapper[4861]: I1003 13:32:13.971542 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:13 crc kubenswrapper[4861]: I1003 13:32:13.971552 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:13 crc kubenswrapper[4861]: I1003 13:32:13.971567 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:13 crc kubenswrapper[4861]: I1003 13:32:13.971578 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:13Z","lastTransitionTime":"2025-10-03T13:32:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:14 crc kubenswrapper[4861]: I1003 13:32:14.073807 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:14 crc kubenswrapper[4861]: I1003 13:32:14.073844 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:14 crc kubenswrapper[4861]: I1003 13:32:14.073853 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:14 crc kubenswrapper[4861]: I1003 13:32:14.073867 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:14 crc kubenswrapper[4861]: I1003 13:32:14.073889 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:14Z","lastTransitionTime":"2025-10-03T13:32:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:14 crc kubenswrapper[4861]: I1003 13:32:14.175987 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:14 crc kubenswrapper[4861]: I1003 13:32:14.176048 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:14 crc kubenswrapper[4861]: I1003 13:32:14.176070 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:14 crc kubenswrapper[4861]: I1003 13:32:14.176095 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:14 crc kubenswrapper[4861]: I1003 13:32:14.176115 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:14Z","lastTransitionTime":"2025-10-03T13:32:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:14 crc kubenswrapper[4861]: I1003 13:32:14.278548 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:14 crc kubenswrapper[4861]: I1003 13:32:14.278634 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:14 crc kubenswrapper[4861]: I1003 13:32:14.278647 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:14 crc kubenswrapper[4861]: I1003 13:32:14.278663 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:14 crc kubenswrapper[4861]: I1003 13:32:14.278707 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:14Z","lastTransitionTime":"2025-10-03T13:32:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:14 crc kubenswrapper[4861]: I1003 13:32:14.379471 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:14 crc kubenswrapper[4861]: I1003 13:32:14.379822 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:14 crc kubenswrapper[4861]: I1003 13:32:14.379950 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:14 crc kubenswrapper[4861]: I1003 13:32:14.380045 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:14 crc kubenswrapper[4861]: I1003 13:32:14.380171 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:14Z","lastTransitionTime":"2025-10-03T13:32:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:14 crc kubenswrapper[4861]: E1003 13:32:14.393287 4861 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:32:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:32:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:14Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:32:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:32:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:14Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9c733c76-1447-4e10-91a4-f1aaa7de6132\\\",\\\"systemUUID\\\":\\\"5c5136c5-33d2-4bef-9fd7-5251914e4451\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:14Z is after 2025-08-24T17:21:41Z"
Oct 03 13:32:14 crc kubenswrapper[4861]: I1003 13:32:14.397086 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:14 crc kubenswrapper[4861]: I1003 13:32:14.397252 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:14 crc kubenswrapper[4861]: I1003 13:32:14.397345 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:14 crc kubenswrapper[4861]: I1003 13:32:14.397565 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:14 crc kubenswrapper[4861]: I1003 13:32:14.397647 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:14Z","lastTransitionTime":"2025-10-03T13:32:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:14 crc kubenswrapper[4861]: E1003 13:32:14.411176 4861 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:32:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:32:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:14Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:32:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:32:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:14Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9c733c76-1447-4e10-91a4-f1aaa7de6132\\\",\\\"systemUUID\\\":\\\"5c5136c5-33d2-4bef-9fd7-5251914e4451\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:14Z is after 2025-08-24T17:21:41Z"
Oct 03 13:32:14 crc kubenswrapper[4861]: I1003 13:32:14.416650 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:14 crc kubenswrapper[4861]: I1003 13:32:14.416707 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:14 crc kubenswrapper[4861]: I1003 13:32:14.416721 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:14 crc kubenswrapper[4861]: I1003 13:32:14.416742 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:14 crc kubenswrapper[4861]: I1003 13:32:14.416759 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:14Z","lastTransitionTime":"2025-10-03T13:32:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:14 crc kubenswrapper[4861]: E1003 13:32:14.430276 4861 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:32:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:32:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:14Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:32:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:32:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:14Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9c733c76-1447-4e10-91a4-f1aaa7de6132\\\",\\\"systemUUID\\\":\\\"5c5136c5-33d2-4bef-9fd7-5251914e4451\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:14Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:14 crc kubenswrapper[4861]: I1003 13:32:14.435263 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:14 crc kubenswrapper[4861]: I1003 13:32:14.435291 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 03 13:32:14 crc kubenswrapper[4861]: I1003 13:32:14.435300 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:14 crc kubenswrapper[4861]: I1003 13:32:14.435314 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:14 crc kubenswrapper[4861]: I1003 13:32:14.435325 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:14Z","lastTransitionTime":"2025-10-03T13:32:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:14 crc kubenswrapper[4861]: E1003 13:32:14.446449 4861 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:32:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:32:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:14Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:32:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:32:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:14Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9c733c76-1447-4e10-91a4-f1aaa7de6132\\\",\\\"systemUUID\\\":\\\"5c5136c5-33d2-4bef-9fd7-5251914e4451\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:14Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:14 crc kubenswrapper[4861]: I1003 13:32:14.450542 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:14 crc kubenswrapper[4861]: I1003 13:32:14.450749 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 03 13:32:14 crc kubenswrapper[4861]: I1003 13:32:14.450849 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:14 crc kubenswrapper[4861]: I1003 13:32:14.450983 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:14 crc kubenswrapper[4861]: I1003 13:32:14.451105 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:14Z","lastTransitionTime":"2025-10-03T13:32:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:14 crc kubenswrapper[4861]: E1003 13:32:14.462738 4861 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:32:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:32:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:14Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:32:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:32:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:14Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9c733c76-1447-4e10-91a4-f1aaa7de6132\\\",\\\"systemUUID\\\":\\\"5c5136c5-33d2-4bef-9fd7-5251914e4451\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:14Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:14 crc kubenswrapper[4861]: E1003 13:32:14.462846 4861 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 03 13:32:14 crc kubenswrapper[4861]: I1003 13:32:14.464127 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 03 13:32:14 crc kubenswrapper[4861]: I1003 13:32:14.464149 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:14 crc kubenswrapper[4861]: I1003 13:32:14.464157 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:14 crc kubenswrapper[4861]: I1003 13:32:14.464169 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:14 crc kubenswrapper[4861]: I1003 13:32:14.464178 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:14Z","lastTransitionTime":"2025-10-03T13:32:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:14 crc kubenswrapper[4861]: I1003 13:32:14.566525 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:14 crc kubenswrapper[4861]: I1003 13:32:14.566593 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:14 crc kubenswrapper[4861]: I1003 13:32:14.566610 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:14 crc kubenswrapper[4861]: I1003 13:32:14.566629 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:14 crc kubenswrapper[4861]: I1003 13:32:14.566645 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:14Z","lastTransitionTime":"2025-10-03T13:32:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:14 crc kubenswrapper[4861]: I1003 13:32:14.669611 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:14 crc kubenswrapper[4861]: I1003 13:32:14.669709 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:14 crc kubenswrapper[4861]: I1003 13:32:14.669726 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:14 crc kubenswrapper[4861]: I1003 13:32:14.669751 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:14 crc kubenswrapper[4861]: I1003 13:32:14.669821 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:14Z","lastTransitionTime":"2025-10-03T13:32:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:14 crc kubenswrapper[4861]: I1003 13:32:14.681441 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-cft42" Oct 03 13:32:14 crc kubenswrapper[4861]: E1003 13:32:14.681593 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-cft42" podUID="267128bb-f8b8-4d69-99a3-ba3af795218c" Oct 03 13:32:14 crc kubenswrapper[4861]: I1003 13:32:14.772222 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:14 crc kubenswrapper[4861]: I1003 13:32:14.772284 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:14 crc kubenswrapper[4861]: I1003 13:32:14.772296 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:14 crc kubenswrapper[4861]: I1003 13:32:14.772312 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:14 crc kubenswrapper[4861]: I1003 13:32:14.772323 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:14Z","lastTransitionTime":"2025-10-03T13:32:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:14 crc kubenswrapper[4861]: I1003 13:32:14.874431 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:14 crc kubenswrapper[4861]: I1003 13:32:14.874467 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:14 crc kubenswrapper[4861]: I1003 13:32:14.874477 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:14 crc kubenswrapper[4861]: I1003 13:32:14.874494 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:14 crc kubenswrapper[4861]: I1003 13:32:14.874507 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:14Z","lastTransitionTime":"2025-10-03T13:32:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:14 crc kubenswrapper[4861]: I1003 13:32:14.977623 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:14 crc kubenswrapper[4861]: I1003 13:32:14.977682 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:14 crc kubenswrapper[4861]: I1003 13:32:14.977695 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:14 crc kubenswrapper[4861]: I1003 13:32:14.977713 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:14 crc kubenswrapper[4861]: I1003 13:32:14.977724 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:14Z","lastTransitionTime":"2025-10-03T13:32:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:15 crc kubenswrapper[4861]: I1003 13:32:15.080330 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:15 crc kubenswrapper[4861]: I1003 13:32:15.080376 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:15 crc kubenswrapper[4861]: I1003 13:32:15.080394 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:15 crc kubenswrapper[4861]: I1003 13:32:15.080414 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:15 crc kubenswrapper[4861]: I1003 13:32:15.080427 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:15Z","lastTransitionTime":"2025-10-03T13:32:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:15 crc kubenswrapper[4861]: I1003 13:32:15.182934 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:15 crc kubenswrapper[4861]: I1003 13:32:15.182962 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:15 crc kubenswrapper[4861]: I1003 13:32:15.182970 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:15 crc kubenswrapper[4861]: I1003 13:32:15.182982 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:15 crc kubenswrapper[4861]: I1003 13:32:15.182990 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:15Z","lastTransitionTime":"2025-10-03T13:32:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:15 crc kubenswrapper[4861]: I1003 13:32:15.285181 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:15 crc kubenswrapper[4861]: I1003 13:32:15.285223 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:15 crc kubenswrapper[4861]: I1003 13:32:15.285254 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:15 crc kubenswrapper[4861]: I1003 13:32:15.285270 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:15 crc kubenswrapper[4861]: I1003 13:32:15.285281 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:15Z","lastTransitionTime":"2025-10-03T13:32:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:15 crc kubenswrapper[4861]: I1003 13:32:15.387538 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:15 crc kubenswrapper[4861]: I1003 13:32:15.387571 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:15 crc kubenswrapper[4861]: I1003 13:32:15.387579 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:15 crc kubenswrapper[4861]: I1003 13:32:15.387591 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:15 crc kubenswrapper[4861]: I1003 13:32:15.387600 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:15Z","lastTransitionTime":"2025-10-03T13:32:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:15 crc kubenswrapper[4861]: I1003 13:32:15.489788 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:15 crc kubenswrapper[4861]: I1003 13:32:15.489816 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:15 crc kubenswrapper[4861]: I1003 13:32:15.489824 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:15 crc kubenswrapper[4861]: I1003 13:32:15.489838 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:15 crc kubenswrapper[4861]: I1003 13:32:15.489846 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:15Z","lastTransitionTime":"2025-10-03T13:32:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:15 crc kubenswrapper[4861]: I1003 13:32:15.592495 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:15 crc kubenswrapper[4861]: I1003 13:32:15.592546 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:15 crc kubenswrapper[4861]: I1003 13:32:15.592562 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:15 crc kubenswrapper[4861]: I1003 13:32:15.592581 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:15 crc kubenswrapper[4861]: I1003 13:32:15.592595 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:15Z","lastTransitionTime":"2025-10-03T13:32:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:15 crc kubenswrapper[4861]: I1003 13:32:15.680731 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 13:32:15 crc kubenswrapper[4861]: I1003 13:32:15.680771 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 13:32:15 crc kubenswrapper[4861]: E1003 13:32:15.680922 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 13:32:15 crc kubenswrapper[4861]: E1003 13:32:15.680955 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 13:32:15 crc kubenswrapper[4861]: I1003 13:32:15.680776 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 13:32:15 crc kubenswrapper[4861]: E1003 13:32:15.681396 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 13:32:15 crc kubenswrapper[4861]: I1003 13:32:15.696347 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:15 crc kubenswrapper[4861]: I1003 13:32:15.696391 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:15 crc kubenswrapper[4861]: I1003 13:32:15.696405 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:15 crc kubenswrapper[4861]: I1003 13:32:15.696422 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:15 crc kubenswrapper[4861]: I1003 13:32:15.696435 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:15Z","lastTransitionTime":"2025-10-03T13:32:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:15 crc kubenswrapper[4861]: I1003 13:32:15.798893 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:15 crc kubenswrapper[4861]: I1003 13:32:15.798920 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:15 crc kubenswrapper[4861]: I1003 13:32:15.798928 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:15 crc kubenswrapper[4861]: I1003 13:32:15.798940 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:15 crc kubenswrapper[4861]: I1003 13:32:15.798949 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:15Z","lastTransitionTime":"2025-10-03T13:32:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Oct 03 13:32:15 crc kubenswrapper[4861]: I1003 13:32:15.901721 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:15 crc kubenswrapper[4861]: I1003 13:32:15.901759 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:15 crc kubenswrapper[4861]: I1003 13:32:15.901770 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:15 crc kubenswrapper[4861]: I1003 13:32:15.901785 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:15 crc kubenswrapper[4861]: I1003 13:32:15.901795 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:15Z","lastTransitionTime":"2025-10-03T13:32:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:16 crc kubenswrapper[4861]: I1003 13:32:16.004506 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:16 crc kubenswrapper[4861]: I1003 13:32:16.004549 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:16 crc kubenswrapper[4861]: I1003 13:32:16.004559 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:16 crc kubenswrapper[4861]: I1003 13:32:16.004574 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:16 crc kubenswrapper[4861]: I1003 13:32:16.004584 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:16Z","lastTransitionTime":"2025-10-03T13:32:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:16 crc kubenswrapper[4861]: I1003 13:32:16.106425 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:16 crc kubenswrapper[4861]: I1003 13:32:16.106701 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:16 crc kubenswrapper[4861]: I1003 13:32:16.106792 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:16 crc kubenswrapper[4861]: I1003 13:32:16.106881 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:16 crc kubenswrapper[4861]: I1003 13:32:16.107000 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:16Z","lastTransitionTime":"2025-10-03T13:32:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:16 crc kubenswrapper[4861]: I1003 13:32:16.209659 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:16 crc kubenswrapper[4861]: I1003 13:32:16.209955 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:16 crc kubenswrapper[4861]: I1003 13:32:16.210090 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:16 crc kubenswrapper[4861]: I1003 13:32:16.210205 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:16 crc kubenswrapper[4861]: I1003 13:32:16.210323 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:16Z","lastTransitionTime":"2025-10-03T13:32:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:16 crc kubenswrapper[4861]: I1003 13:32:16.312438 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:16 crc kubenswrapper[4861]: I1003 13:32:16.312470 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:16 crc kubenswrapper[4861]: I1003 13:32:16.312478 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:16 crc kubenswrapper[4861]: I1003 13:32:16.312492 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:16 crc kubenswrapper[4861]: I1003 13:32:16.312502 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:16Z","lastTransitionTime":"2025-10-03T13:32:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:16 crc kubenswrapper[4861]: I1003 13:32:16.449400 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:16 crc kubenswrapper[4861]: I1003 13:32:16.449456 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:16 crc kubenswrapper[4861]: I1003 13:32:16.449469 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:16 crc kubenswrapper[4861]: I1003 13:32:16.449486 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:16 crc kubenswrapper[4861]: I1003 13:32:16.449497 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:16Z","lastTransitionTime":"2025-10-03T13:32:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:16 crc kubenswrapper[4861]: I1003 13:32:16.551540 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:16 crc kubenswrapper[4861]: I1003 13:32:16.551575 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:16 crc kubenswrapper[4861]: I1003 13:32:16.551586 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:16 crc kubenswrapper[4861]: I1003 13:32:16.551601 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:16 crc kubenswrapper[4861]: I1003 13:32:16.551611 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:16Z","lastTransitionTime":"2025-10-03T13:32:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:16 crc kubenswrapper[4861]: I1003 13:32:16.653944 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:16 crc kubenswrapper[4861]: I1003 13:32:16.653977 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:16 crc kubenswrapper[4861]: I1003 13:32:16.653986 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:16 crc kubenswrapper[4861]: I1003 13:32:16.654000 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:16 crc kubenswrapper[4861]: I1003 13:32:16.654010 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:16Z","lastTransitionTime":"2025-10-03T13:32:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:16 crc kubenswrapper[4861]: I1003 13:32:16.681014 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cft42"
Oct 03 13:32:16 crc kubenswrapper[4861]: E1003 13:32:16.681148 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-cft42" podUID="267128bb-f8b8-4d69-99a3-ba3af795218c"
Oct 03 13:32:16 crc kubenswrapper[4861]: I1003 13:32:16.695934 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"587ecce6-1ef4-4f74-a2ba-bd6e9fdb84dc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d77b51532e1ed4922634cbfc9360ac49276104c2c3ca115ea522ff423cd7bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://853fb69edcd3e4a27929ab2a6081c40f93553967619663805afb7b626f9c1e39\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75079f3e07d277ab11585e34fc72877ba93a8d0aeaa3f0c8bb214c7c14f9c1b1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfcce0420b92a42aaae0ae0e6aa26b655cd97f6ce5d45b671bbf394217027023\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cd18d89b8b81faa6f8232c33ecaec19aafc3d9574a090cbef37bcf26f83a4bf9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 13:31:47.746138 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 13:31:47.746280 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 13:31:47.747035 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2119377140/tls.crt::/tmp/serving-cert-2119377140/tls.key\\\\\\\"\\\\nI1003 13:31:48.538827 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 13:31:48.544908 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 13:31:48.544935 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 13:31:48.545220 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 13:31:48.545275 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 13:31:48.555911 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1003 13:31:48.555947 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 13:31:48.555953 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 13:31:48.555961 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 13:31:48.555964 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 13:31:48.555968 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 13:31:48.555971 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1003 13:31:48.556259 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1003 13:31:48.559989 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb1af1cb5b66706cd0a0da5a3f6b2c380a771100e61f84ca2c85c28f1878f7f6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bdf52752b6cec17ac3d33b9c94d08302ff9e7ff88ab9c23590134a36b1384af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bdf52752b6cec17ac3d33b9c94d08302ff9e7ff88ab9c23590134a36b1384af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:16Z is after 2025-08-24T17:21:41Z"
Oct 03 13:32:16 crc kubenswrapper[4861]: I1003 13:32:16.710168 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6d0e9cdc-883e-4b67-afb2-2ef5f4b3246d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01eb05b088e421c220145fd833922351aeba4a520944c6b707039785e26ef303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb9bcc84067a58db80e3c7e1b23825baeaff91f97351e9ada3765b6589fda35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80131782150ecbeb45ec2f55e86909b3735ec4f0b09e27e31f6dfc24d6d4ccd7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a40838be9fe69f9bebecff82c9f10b4c00e167b7f927682e6b18ff490bd10ad4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:16Z is after 2025-08-24T17:21:41Z"
Oct 03 13:32:16 crc kubenswrapper[4861]: I1003 13:32:16.720140 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-n974h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"25670d98-45f4-4308-9576-f6f532c422ec\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d9b2d8fb10bb6dc17ca3b4826e1e4b7e8e562e8c8745605cd332268197166b04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2cs7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:56Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-n974h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:16Z is after 2025-08-24T17:21:41Z"
Oct 03 13:32:16 crc kubenswrapper[4861]: I1003 13:32:16.732671 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-cft42" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"267128bb-f8b8-4d69-99a3-ba3af795218c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shz7z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shz7z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:32:02Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-cft42\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:16Z is after 2025-08-24T17:21:41Z"
Oct 03 13:32:16 crc kubenswrapper[4861]: I1003 13:32:16.743558 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://876e806fe7d7313a700bd557fe86fe469146eeb63ecd75684c558d04f6ef5862\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:16Z is after 2025-08-24T17:21:41Z"
Oct 03 13:32:16 crc kubenswrapper[4861]: I1003 13:32:16.756751 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:16 crc kubenswrapper[4861]: I1003 13:32:16.756783 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:16 crc kubenswrapper[4861]: I1003 13:32:16.756791 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:16 crc kubenswrapper[4861]: I1003 13:32:16.756805 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:16 crc kubenswrapper[4861]: I1003 13:32:16.756813 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:16Z","lastTransitionTime":"2025-10-03T13:32:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:16 crc kubenswrapper[4861]: I1003 13:32:16.761971 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66ed4999-426b-4615-bfb3-764a3ecc950f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a6dd78f8d0e99d19d90df2672bd0a66e48195ab147e3821b110c5b9b13fff935\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d7403a686a403bd13b8c7040a8d54e47ea882e532dbde51ff960cf2b4a7dc84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dadd381cc9fb8f216611723c7f3113272fdd37e424ab087ae2b516b1282c724\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c11d2168c2a8a146f93a9048c50a0a7da936f36039a924e2e0c946f571ac6d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://17d9e0e142062e93034c5f825e1229664112d38443d5843713cac6e077737c48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a49ed8048fd561e10cc87dfa9b39d3ff2123f2cc65f9b4402bba6bf01d161213\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://98cc8bd6dcdd5a486b40084c8a564a82868e13805b4c65ddaf39f5c3fe11266b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://98cc8bd6dcdd5a486b40084c8a564a82868e13805b4c65ddaf39f5c3fe11266b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T13:32:02Z\\\",\\\"message\\\":\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-console/downloads\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.213\\\\\\\", Port:80, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nF1003 13:32:01.716595 6227 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:01Z is after 2025-08-24T17:21:41Z]\\\\nI100\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T13:32:01Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-5twn4_openshift-ovn-kubernetes(66ed4999-426b-4615-bfb3-764a3ecc950f)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2ee45e3f91ddde95e7bdf26aed6afb1d69eb3dbbdad136c66a51a2a3a325984\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85920cc6705b475966ceb17c3776353ec6cd31730aab339b4e5469034c49264e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://85920cc6705b475966ceb17c3776353ec6cd31730aab339b4e5469034c49264e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5twn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:16Z is after 2025-08-24T17:21:41Z"
Oct 03 13:32:16 crc kubenswrapper[4861]: I1003 13:32:16.776185 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:16Z is after 2025-08-24T17:21:41Z"
Oct 03 13:32:16 crc kubenswrapper[4861]: I1003 13:32:16.787063 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ceeea9a8c61928b935a6c01f2dda3f9bf0036c2c2792c9338cc580a3296285b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:16Z is after 2025-08-24T17:21:41Z"
Oct 03 13:32:16 crc kubenswrapper[4861]: I1003 13:32:16.800722 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:16Z is after 2025-08-24T17:21:41Z"
Oct 03 13:32:16 crc kubenswrapper[4861]: I1003 13:32:16.810679 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:16Z is after 2025-08-24T17:21:41Z"
Oct 03 13:32:16 crc kubenswrapper[4861]: I1003 13:32:16.821482 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f330284d8d5446236d2bf739c6df75969c865e304c5adab6b1ec40a92baf30d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63b55320840755d0a9c8296d19550ae1d7f5cb2f17d286dddc10a0202963bd98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:16Z is after 2025-08-24T17:21:41Z"
Oct 03 13:32:16 crc kubenswrapper[4861]: I1003 13:32:16.831711 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jwgvx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f714b7db-082f-4c2c-8239-ba5df6986c13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://163f5cadc8f9cf8082434639e5dd0dfae5cefc359dbf462b616e4dde476a309f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b76qk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jwgvx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:16Z is after 2025-08-24T17:21:41Z"
Oct 03 13:32:16 crc kubenswrapper[4861]: I1003 13:32:16.841506 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d8335d3f-417e-4114-b306-a3d8f6c31348\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c79706d97ac0c9214aee8c49206bfb27e579a82781b63cf07bd7b9dc43077402\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7prvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://871a1c47b73846e3f28db33691e75b5ed73af7287e81dae4cf2134fd827614b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7prvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-t9slw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:16Z is after 2025-08-24T17:21:41Z"
Oct 03 13:32:16 crc kubenswrapper[4861]: I1003 13:32:16.851466 4861 status_manager.go:875] "Failed to
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hw4vl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9671621a-5831-4fc4-8508-08b284d1cf88\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5cb2b6048f454a138caea8eca4a26c6e5cd219c9d124f46cfa69c168150b6ee4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:32:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sv2d7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53e87e91f53c3b754f21de857a8712b51063c580785de43d0c0e89f47185c549\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sv2d7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:32:01Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hw4vl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:16Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:16 crc kubenswrapper[4861]: I1003 13:32:16.859315 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:16 crc kubenswrapper[4861]: I1003 13:32:16.859373 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:16 crc kubenswrapper[4861]: I1003 13:32:16.859395 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:16 crc kubenswrapper[4861]: I1003 13:32:16.859409 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:16 crc kubenswrapper[4861]: I1003 13:32:16.859418 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:16Z","lastTransitionTime":"2025-10-03T13:32:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:16 crc kubenswrapper[4861]: I1003 13:32:16.868366 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wm76s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3bf3157b-44d1-4bb3-b185-71523a80c054\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8fea8b7da57798521c13d0f35905e5311cd0d8016aa20c37cc0d73c8d6fbc1a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b41a8d9dd0f504f505f1d49f5b7fa39d025f0bc4c49b19ef61691a41bd162b0\\\",\\\"image\\\":\\\"quay.io/openshift-rel
ease-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b41a8d9dd0f504f505f1d49f5b7fa39d025f0bc4c49b19ef61691a41bd162b0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad87b94fe313e12f4f857a69a0a01f6f34877ded4103f2de3015b588522d9904\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad87b94fe313e12f4f857a69a0a01f6f34877ded4103f2de3015b588522d9904\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4343077b8251a65be24e9269fa38bd48d628581ef65b43f7fa262286df7b677d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4343077b8251a65be24e9269fa38bd48d628581ef65b43f7fa262286df7b677d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin
\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d10915a62560e9a9e386d4029ccd1c6cd9c99209ab32653437c32a6e5cf7e98d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d10915a62560e9a9e386d4029ccd1c6cd9c99209ab32653437c32a6e5cf7e98d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cd99ac90e9cf617307233b5899d9da44b563bd5a0969e0a64c4073ee0122b63\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cd99ac90e9cf617307233b5899d9da44b563bd5a0969e0a64c4073ee0122b63\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b20088a595b5a59d1a0339827c7dd169c479a530aed875ac3eeb021b78269490\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b20088a595b5a5
9d1a0339827c7dd169c479a530aed875ac3eeb021b78269490\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wm76s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:16Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:16 crc kubenswrapper[4861]: I1003 13:32:16.883594 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-c97s6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e1217d91-8c47-4353-b363-96c9de2cdb56\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70a4ac69623752a83655a58cf44ef00fbf88b0321bc83721fbbe16ea746699c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6zdw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-c97s6\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:16Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:16 crc kubenswrapper[4861]: I1003 13:32:16.962159 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:16 crc kubenswrapper[4861]: I1003 13:32:16.962192 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:16 crc kubenswrapper[4861]: I1003 13:32:16.962202 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:16 crc kubenswrapper[4861]: I1003 13:32:16.962218 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:16 crc kubenswrapper[4861]: I1003 13:32:16.962269 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:16Z","lastTransitionTime":"2025-10-03T13:32:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:17 crc kubenswrapper[4861]: I1003 13:32:17.064846 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:17 crc kubenswrapper[4861]: I1003 13:32:17.064893 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:17 crc kubenswrapper[4861]: I1003 13:32:17.064904 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:17 crc kubenswrapper[4861]: I1003 13:32:17.064921 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:17 crc kubenswrapper[4861]: I1003 13:32:17.064931 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:17Z","lastTransitionTime":"2025-10-03T13:32:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Oct 03 13:32:17 crc kubenswrapper[4861]: I1003 13:32:17.166935 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:17 crc kubenswrapper[4861]: I1003 13:32:17.166997 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:17 crc kubenswrapper[4861]: I1003 13:32:17.167008 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:17 crc kubenswrapper[4861]: I1003 13:32:17.167022 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:17 crc kubenswrapper[4861]: I1003 13:32:17.167030 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:17Z","lastTransitionTime":"2025-10-03T13:32:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:17 crc kubenswrapper[4861]: I1003 13:32:17.269653 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:17 crc kubenswrapper[4861]: I1003 13:32:17.269686 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:17 crc kubenswrapper[4861]: I1003 13:32:17.269696 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:17 crc kubenswrapper[4861]: I1003 13:32:17.269708 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:17 crc kubenswrapper[4861]: I1003 13:32:17.269717 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:17Z","lastTransitionTime":"2025-10-03T13:32:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:17 crc kubenswrapper[4861]: I1003 13:32:17.372119 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:17 crc kubenswrapper[4861]: I1003 13:32:17.372165 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:17 crc kubenswrapper[4861]: I1003 13:32:17.372177 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:17 crc kubenswrapper[4861]: I1003 13:32:17.372191 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:17 crc kubenswrapper[4861]: I1003 13:32:17.372200 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:17Z","lastTransitionTime":"2025-10-03T13:32:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:17 crc kubenswrapper[4861]: I1003 13:32:17.474743 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:17 crc kubenswrapper[4861]: I1003 13:32:17.474786 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:17 crc kubenswrapper[4861]: I1003 13:32:17.474798 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:17 crc kubenswrapper[4861]: I1003 13:32:17.474816 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:17 crc kubenswrapper[4861]: I1003 13:32:17.474828 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:17Z","lastTransitionTime":"2025-10-03T13:32:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:17 crc kubenswrapper[4861]: I1003 13:32:17.576754 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:17 crc kubenswrapper[4861]: I1003 13:32:17.576800 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:17 crc kubenswrapper[4861]: I1003 13:32:17.576811 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:17 crc kubenswrapper[4861]: I1003 13:32:17.576825 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:17 crc kubenswrapper[4861]: I1003 13:32:17.576835 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:17Z","lastTransitionTime":"2025-10-03T13:32:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:17 crc kubenswrapper[4861]: I1003 13:32:17.679265 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:17 crc kubenswrapper[4861]: I1003 13:32:17.679292 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:17 crc kubenswrapper[4861]: I1003 13:32:17.679300 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:17 crc kubenswrapper[4861]: I1003 13:32:17.679312 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:17 crc kubenswrapper[4861]: I1003 13:32:17.679321 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:17Z","lastTransitionTime":"2025-10-03T13:32:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
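
Each setters.go:603 entry above embeds the full Ready condition as a JSON object. The short Go sketch below, hand-written for illustration rather than imported from k8s.io/api, decodes one of those condition objects verbatim; the field names follow the Kubernetes NodeCondition schema.

// Sketch: decode the Ready condition exactly as logged above.
package main

import (
	"encoding/json"
	"fmt"
)

type NodeCondition struct {
	Type               string `json:"type"`
	Status             string `json:"status"`
	LastHeartbeatTime  string `json:"lastHeartbeatTime"`
	LastTransitionTime string `json:"lastTransitionTime"`
	Reason             string `json:"reason"`
	Message            string `json:"message"`
}

func main() {
	// Condition JSON copied from a setters.go:603 entry above.
	raw := `{"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:17Z","lastTransitionTime":"2025-10-03T13:32:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}`
	var c NodeCondition
	if err := json.Unmarshal([]byte(raw), &c); err != nil {
		panic(err)
	}
	fmt.Printf("node not ready: reason=%s since=%s\n", c.Reason, c.LastTransitionTime)
}
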
Oct 03 13:32:17 crc kubenswrapper[4861]: I1003 13:32:17.680957 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 03 13:32:17 crc kubenswrapper[4861]: E1003 13:32:17.681040 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 03 13:32:17 crc kubenswrapper[4861]: I1003 13:32:17.681176 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 03 13:32:17 crc kubenswrapper[4861]: E1003 13:32:17.681250 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 03 13:32:17 crc kubenswrapper[4861]: I1003 13:32:17.681522 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 03 13:32:17 crc kubenswrapper[4861]: E1003 13:32:17.681571 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 03 13:32:17 crc kubenswrapper[4861]: I1003 13:32:17.781118 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:17 crc kubenswrapper[4861]: I1003 13:32:17.781168 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:17 crc kubenswrapper[4861]: I1003 13:32:17.781180 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:17 crc kubenswrapper[4861]: I1003 13:32:17.781195 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:17 crc kubenswrapper[4861]: I1003 13:32:17.781206 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:17Z","lastTransitionTime":"2025-10-03T13:32:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
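
The recurring "no CNI configuration file" message that keeps the node NotReady points at one concrete condition: an empty CNI conf directory. Below is a rough Go equivalent of that check, assuming the conventional *.conf/*.conflist/*.json extensions; the directory path is taken from the log itself, not from kubelet source.

// Sketch: approximate the CNI-config readiness check behind the
// NetworkReady=false messages above.
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func main() {
	confDir := "/etc/kubernetes/cni/net.d"
	var confs []string
	// Conventional CNI config extensions (assumption, not kubelet source).
	for _, ext := range []string{"*.conf", "*.conflist", "*.json"} {
		m, _ := filepath.Glob(filepath.Join(confDir, ext))
		confs = append(confs, m...)
	}
	if len(confs) == 0 {
		fmt.Fprintf(os.Stderr,
			"NetworkReady=false reason:NetworkPluginNotReady: no CNI configuration file in %s/. Has your network provider started?\n",
			confDir)
		os.Exit(1)
	}
	fmt.Println("CNI configuration present:", confs)
}

Once the network provider (here, multus and ovn-kubernetes) manages to write its config into that directory, the NetworkReady condition should clear on a later sync loop.
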
Oct 03 13:32:17 crc kubenswrapper[4861]: I1003 13:32:17.883576 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:17 crc kubenswrapper[4861]: I1003 13:32:17.883607 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:17 crc kubenswrapper[4861]: I1003 13:32:17.883638 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:17 crc kubenswrapper[4861]: I1003 13:32:17.883656 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:17 crc kubenswrapper[4861]: I1003 13:32:17.883667 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:17Z","lastTransitionTime":"2025-10-03T13:32:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:17 crc kubenswrapper[4861]: I1003 13:32:17.985776 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:17 crc kubenswrapper[4861]: I1003 13:32:17.985817 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:17 crc kubenswrapper[4861]: I1003 13:32:17.985829 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:17 crc kubenswrapper[4861]: I1003 13:32:17.985849 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:17 crc kubenswrapper[4861]: I1003 13:32:17.985859 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:17Z","lastTransitionTime":"2025-10-03T13:32:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:18 crc kubenswrapper[4861]: I1003 13:32:18.088567 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:18 crc kubenswrapper[4861]: I1003 13:32:18.088630 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:18 crc kubenswrapper[4861]: I1003 13:32:18.088642 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:18 crc kubenswrapper[4861]: I1003 13:32:18.088658 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:18 crc kubenswrapper[4861]: I1003 13:32:18.088670 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:18Z","lastTransitionTime":"2025-10-03T13:32:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:18 crc kubenswrapper[4861]: I1003 13:32:18.190899 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:18 crc kubenswrapper[4861]: I1003 13:32:18.190932 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:18 crc kubenswrapper[4861]: I1003 13:32:18.190940 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:18 crc kubenswrapper[4861]: I1003 13:32:18.190954 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:18 crc kubenswrapper[4861]: I1003 13:32:18.190965 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:18Z","lastTransitionTime":"2025-10-03T13:32:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:18 crc kubenswrapper[4861]: I1003 13:32:18.293628 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:18 crc kubenswrapper[4861]: I1003 13:32:18.293667 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:18 crc kubenswrapper[4861]: I1003 13:32:18.293677 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:18 crc kubenswrapper[4861]: I1003 13:32:18.293691 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:18 crc kubenswrapper[4861]: I1003 13:32:18.293700 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:18Z","lastTransitionTime":"2025-10-03T13:32:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:18 crc kubenswrapper[4861]: I1003 13:32:18.396219 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:18 crc kubenswrapper[4861]: I1003 13:32:18.396927 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:18 crc kubenswrapper[4861]: I1003 13:32:18.397026 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:18 crc kubenswrapper[4861]: I1003 13:32:18.397130 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:18 crc kubenswrapper[4861]: I1003 13:32:18.397303 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:18Z","lastTransitionTime":"2025-10-03T13:32:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"} Oct 03 13:32:18 crc kubenswrapper[4861]: I1003 13:32:18.413595 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 03 13:32:18 crc kubenswrapper[4861]: I1003 13:32:18.428424 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:18Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:18 crc kubenswrapper[4861]: I1003 13:32:18.448574 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ceeea9a8c61928b935a6c01f2dda3f9bf0036c2c2792c9338cc580a3296285b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:18Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:18 crc kubenswrapper[4861]: I1003 13:32:18.462349 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:18Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:18 crc kubenswrapper[4861]: I1003 13:32:18.476988 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://876e806fe7d7313a700bd557fe86fe469146eeb63ecd75684c558d04f6ef5862\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:18Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:18 crc kubenswrapper[4861]: I1003 13:32:18.500127 4861 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66ed4999-426b-4615-bfb3-764a3ecc950f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a6dd78f8d0e99d19d90df2672bd0a66e48195ab147e3821b110c5b9b13fff935\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d7403a686a403bd13b8c7040a8d54e47ea882e532dbde51ff960cf2b4a7dc84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dadd381cc9fb8f216611723c7f3113272fdd37e424ab087ae2b516b1282c724\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36c
dd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c11d2168c2a8a146f93a9048c50a0a7da936f36039a924e2e0c946f571ac6d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://17d9e0e142062e93034c5f825e1229664112d38443d5843713cac6e077737c48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a49ed8048fd561e10cc87dfa9b39d3ff2123f2cc65f9b4402bba6bf01d161213\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-con
troller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://98cc8bd6dcdd5a486b40084c8a564a82868e13805b4c65ddaf39f5c3fe11266b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://98cc8bd6dcdd5a486b40084c8a564a82868e13805b4c65ddaf39f5c3fe11266b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T13:32:02Z\\\",\\\"message\\\":\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-console/downloads\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.213\\\\\\\", Port:80, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nF1003 13:32:01.716595 6227 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:01Z is after 2025-08-24T17:21:41Z]\\\\nI100\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T13:32:01Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-5twn4_openshift-ovn-kubernetes(66ed4999-426b-4615-bfb3-764a3ecc950f)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2ee45e3f91ddde95e7bdf26aed6afb1d69eb3dbbdad136c66a51a2a3a325984\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85920cc6705b475966ceb17c3776353ec6cd31730aab339b4e5469034c49264e\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://85920cc6705b475966ceb17c3776353ec6cd31730aab339b4e5469034c49264e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5twn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:18Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:18 crc kubenswrapper[4861]: I1003 13:32:18.501127 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:18 crc kubenswrapper[4861]: I1003 13:32:18.501192 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:18 crc kubenswrapper[4861]: I1003 13:32:18.501207 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:18 crc kubenswrapper[4861]: I1003 13:32:18.501225 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:18 crc kubenswrapper[4861]: I1003 13:32:18.501257 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:18Z","lastTransitionTime":"2025-10-03T13:32:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:18 crc kubenswrapper[4861]: I1003 13:32:18.514016 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f330284d8d5446236d2bf739c6df75969c865e304c5adab6b1ec40a92baf30d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63b55320840755d0a9c8296d19550ae1d7f5cb2f17d286dddc10a0202963bd98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:18Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:18 crc kubenswrapper[4861]: I1003 13:32:18.530575 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jwgvx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f714b7db-082f-4c2c-8239-ba5df6986c13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://163f5cadc8f9cf8082434639e5dd0dfae5cefc359dbf462b616e4dde476a309f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b76qk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jwgvx\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:18Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:18 crc kubenswrapper[4861]: I1003 13:32:18.543256 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d8335d3f-417e-4114-b306-a3d8f6c31348\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c79706d97ac0c9214aee8c49206bfb27e579a82781b63cf07bd7b9dc43077402\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7prvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://871a1c47b73846e3f28db33691e75b5ed73af7287e81dae4cf2134fd827614b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7prvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-t9slw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:18Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:18 crc kubenswrapper[4861]: I1003 13:32:18.555612 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hw4vl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9671621a-5831-4fc4-8508-08b284d1cf88\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5cb2b6048f454a138caea8eca4a26c6e5cd219c9d124f46cfa69c168150b6ee4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:32:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sv2d7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53e87e91f53c3b754f21de857a8712b51063c580785de43d0c0e89f47185c549\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sv2d7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\
"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:32:01Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hw4vl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:18Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:18 crc kubenswrapper[4861]: I1003 13:32:18.570191 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:18Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:18 crc kubenswrapper[4861]: I1003 13:32:18.590991 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/267128bb-f8b8-4d69-99a3-ba3af795218c-metrics-certs\") pod \"network-metrics-daemon-cft42\" (UID: \"267128bb-f8b8-4d69-99a3-ba3af795218c\") " pod="openshift-multus/network-metrics-daemon-cft42" Oct 03 13:32:18 crc kubenswrapper[4861]: E1003 13:32:18.591125 4861 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 03 13:32:18 crc kubenswrapper[4861]: E1003 13:32:18.591183 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/267128bb-f8b8-4d69-99a3-ba3af795218c-metrics-certs podName:267128bb-f8b8-4d69-99a3-ba3af795218c nodeName:}" failed. No retries permitted until 2025-10-03 13:32:34.591167341 +0000 UTC m=+68.589152398 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/267128bb-f8b8-4d69-99a3-ba3af795218c-metrics-certs") pod "network-metrics-daemon-cft42" (UID: "267128bb-f8b8-4d69-99a3-ba3af795218c") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 03 13:32:18 crc kubenswrapper[4861]: I1003 13:32:18.596549 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wm76s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3bf3157b-44d1-4bb3-b185-71523a80c054\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8fea8b7da57798521c13d0f35905e5311cd0d8016aa20c37cc0d73c8d6fbc1a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b41a8d9dd0f504f505f1d49f5b7fa39d025f0bc4c49b19ef61691a41bd162b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b41a8d9dd0f504f505f1d49f5b7fa39d025f0bc4c49b19ef61691a41bd162b0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/
var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad87b94fe313e12f4f857a69a0a01f6f34877ded4103f2de3015b588522d9904\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad87b94fe313e12f4f857a69a0a01f6f34877ded4103f2de3015b588522d9904\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4343077b8251a65be24e9269fa38bd48d628581ef65b43f7fa262286df7b677d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4343077b8251a65be24e9269fa38bd48d628581ef65b43f7fa262286df7b677d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d10915a62560e9a9e386d4029ccd1c6cd9c99209ab32653437c32a6e5cf7e98d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d10915a62560e9a9e386d4029ccd1c6cd9c99209ab32653437c32a6e5cf7e98d
\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cd99ac90e9cf617307233b5899d9da44b563bd5a0969e0a64c4073ee0122b63\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cd99ac90e9cf617307233b5899d9da44b563bd5a0969e0a64c4073ee0122b63\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b20088a595b5a59d1a0339827c7dd169c479a530aed875ac3eeb021b78269490\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b20088a595b5a59d1a0339827c7dd169c479a530aed875ac3eeb021b78269490\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wm76s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:18Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:18 crc kubenswrapper[4861]: I1003 13:32:18.602985 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:18 crc kubenswrapper[4861]: I1003 13:32:18.603026 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:18 crc kubenswrapper[4861]: I1003 13:32:18.603038 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:18 crc kubenswrapper[4861]: I1003 13:32:18.603058 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:18 crc kubenswrapper[4861]: I1003 13:32:18.603069 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:18Z","lastTransitionTime":"2025-10-03T13:32:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:18 crc kubenswrapper[4861]: I1003 13:32:18.627866 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-c97s6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e1217d91-8c47-4353-b363-96c9de2cdb56\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70a4ac69623752a83655a58cf44ef00fbf88b0321bc83721fbbe16ea746699c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6zdw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"
startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-c97s6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:18Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:18 crc kubenswrapper[4861]: I1003 13:32:18.644687 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6d0e9cdc-883e-4b67-afb2-2ef5f4b3246d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01eb05b088e421c220145fd833922351aeba4a520944c6b707039785e26ef303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb9bcc84067a58db80e3c7e1b23825baeaff91f97351e9ada3765b6589fda35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80131782150ecbeb45ec2f55e86909b3735ec4f0b09e27e31f6dfc24d6d4ccd7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faa
f92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a40838be9fe69f9bebecff82c9f10b4c00e167b7f927682e6b18ff490bd10ad4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:18Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:18 crc kubenswrapper[4861]: I1003 13:32:18.654886 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-n974h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"25670d98-45f4-4308-9576-f6f532c422ec\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d9b2d8fb10bb6dc17ca3b4826e1e4b7e8e562e8c8745605cd332268197166b04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2cs7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:56Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-n974h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:18Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:18 crc kubenswrapper[4861]: I1003 13:32:18.667217 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-cft42" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"267128bb-f8b8-4d69-99a3-ba3af795218c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shz7z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shz7z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:32:02Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-cft42\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:18Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:18 crc kubenswrapper[4861]: I1003 13:32:18.680551 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cft42" Oct 03 13:32:18 crc kubenswrapper[4861]: E1003 13:32:18.680702 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-cft42" podUID="267128bb-f8b8-4d69-99a3-ba3af795218c" Oct 03 13:32:18 crc kubenswrapper[4861]: I1003 13:32:18.682025 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"587ecce6-1ef4-4f74-a2ba-bd6e9fdb84dc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d77b51532e1ed4922634cbfc9360ac49276104c2c3ca115ea522ff423cd7bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://853fb69edcd3e4a27929ab2a6081c40f93553967619663805afb7b626f9c1e39\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75079f3e07d277ab11585e34fc72877ba93a8d0aeaa3f0c8bb214c7c14f9c1b1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"
name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfcce0420b92a42aaae0ae0e6aa26b655cd97f6ce5d45b671bbf394217027023\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cd18d89b8b81faa6f8232c33ecaec19aafc3d9574a090cbef37bcf26f83a4bf9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 13:31:47.746138 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 13:31:47.746280 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 13:31:47.747035 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2119377140/tls.crt::/tmp/serving-cert-2119377140/tls.key\\\\\\\"\\\\nI1003 13:31:48.538827 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 13:31:48.544908 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 13:31:48.544935 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 13:31:48.545220 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 13:31:48.545275 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 13:31:48.555911 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1003 13:31:48.555947 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 13:31:48.555953 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 13:31:48.555961 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 13:31:48.555964 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 13:31:48.555968 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 13:31:48.555971 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1003 13:31:48.556259 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1003 13:31:48.559989 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb1af1cb5b66706cd0a0da5a3f6b2c380a771100e61f84ca2c85c28f1878f7f6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bdf52752b6cec17ac3d33b9c94d08302ff9e7ff88ab9c23590134a36b1384af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bdf52752b6cec17ac3d33b9c94d08302ff9e7ff88ab9c23590134a36b1384af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:18Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:18 crc kubenswrapper[4861]: I1003 13:32:18.705412 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:18 crc kubenswrapper[4861]: I1003 13:32:18.705625 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:18 crc kubenswrapper[4861]: I1003 13:32:18.705690 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:18 crc kubenswrapper[4861]: I1003 13:32:18.705751 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:18 crc kubenswrapper[4861]: I1003 13:32:18.705807 4861 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:18Z","lastTransitionTime":"2025-10-03T13:32:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:18 crc kubenswrapper[4861]: I1003 13:32:18.808121 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:18 crc kubenswrapper[4861]: I1003 13:32:18.808150 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:18 crc kubenswrapper[4861]: I1003 13:32:18.808160 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:18 crc kubenswrapper[4861]: I1003 13:32:18.808173 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:18 crc kubenswrapper[4861]: I1003 13:32:18.808182 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:18Z","lastTransitionTime":"2025-10-03T13:32:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:18 crc kubenswrapper[4861]: I1003 13:32:18.911126 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:18 crc kubenswrapper[4861]: I1003 13:32:18.911637 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:18 crc kubenswrapper[4861]: I1003 13:32:18.911848 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:18 crc kubenswrapper[4861]: I1003 13:32:18.912046 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:18 crc kubenswrapper[4861]: I1003 13:32:18.912225 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:18Z","lastTransitionTime":"2025-10-03T13:32:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:19 crc kubenswrapper[4861]: I1003 13:32:19.015516 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:19 crc kubenswrapper[4861]: I1003 13:32:19.015553 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:19 crc kubenswrapper[4861]: I1003 13:32:19.015565 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:19 crc kubenswrapper[4861]: I1003 13:32:19.015581 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:19 crc kubenswrapper[4861]: I1003 13:32:19.015593 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:19Z","lastTransitionTime":"2025-10-03T13:32:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:19 crc kubenswrapper[4861]: I1003 13:32:19.118276 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:19 crc kubenswrapper[4861]: I1003 13:32:19.118355 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:19 crc kubenswrapper[4861]: I1003 13:32:19.118373 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:19 crc kubenswrapper[4861]: I1003 13:32:19.118394 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:19 crc kubenswrapper[4861]: I1003 13:32:19.118436 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:19Z","lastTransitionTime":"2025-10-03T13:32:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:19 crc kubenswrapper[4861]: I1003 13:32:19.220576 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:19 crc kubenswrapper[4861]: I1003 13:32:19.220827 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:19 crc kubenswrapper[4861]: I1003 13:32:19.220909 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:19 crc kubenswrapper[4861]: I1003 13:32:19.220999 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:19 crc kubenswrapper[4861]: I1003 13:32:19.221124 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:19Z","lastTransitionTime":"2025-10-03T13:32:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:19 crc kubenswrapper[4861]: I1003 13:32:19.323189 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:19 crc kubenswrapper[4861]: I1003 13:32:19.323478 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:19 crc kubenswrapper[4861]: I1003 13:32:19.323618 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:19 crc kubenswrapper[4861]: I1003 13:32:19.323709 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:19 crc kubenswrapper[4861]: I1003 13:32:19.323809 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:19Z","lastTransitionTime":"2025-10-03T13:32:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:19 crc kubenswrapper[4861]: I1003 13:32:19.426173 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:19 crc kubenswrapper[4861]: I1003 13:32:19.426737 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:19 crc kubenswrapper[4861]: I1003 13:32:19.426819 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:19 crc kubenswrapper[4861]: I1003 13:32:19.426926 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:19 crc kubenswrapper[4861]: I1003 13:32:19.426993 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:19Z","lastTransitionTime":"2025-10-03T13:32:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:19 crc kubenswrapper[4861]: I1003 13:32:19.498187 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 13:32:19 crc kubenswrapper[4861]: E1003 13:32:19.498359 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 13:32:51.498325246 +0000 UTC m=+85.496310293 (durationBeforeRetry 32s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:32:19 crc kubenswrapper[4861]: I1003 13:32:19.498414 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 13:32:19 crc kubenswrapper[4861]: I1003 13:32:19.498484 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 13:32:19 crc kubenswrapper[4861]: I1003 13:32:19.498507 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 13:32:19 crc kubenswrapper[4861]: I1003 13:32:19.498526 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 13:32:19 crc kubenswrapper[4861]: E1003 13:32:19.498600 4861 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 03 13:32:19 crc kubenswrapper[4861]: E1003 13:32:19.498626 4861 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 03 13:32:19 crc kubenswrapper[4861]: E1003 13:32:19.498639 4861 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 13:32:19 crc kubenswrapper[4861]: E1003 13:32:19.498649 4861 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 03 13:32:19 crc kubenswrapper[4861]: E1003 13:32:19.498646 4861 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 03 13:32:19 crc 
kubenswrapper[4861]: E1003 13:32:19.498668 4861 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 03 13:32:19 crc kubenswrapper[4861]: E1003 13:32:19.498751 4861 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 13:32:19 crc kubenswrapper[4861]: E1003 13:32:19.498604 4861 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 03 13:32:19 crc kubenswrapper[4861]: E1003 13:32:19.498688 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-03 13:32:51.498672075 +0000 UTC m=+85.496657182 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 13:32:19 crc kubenswrapper[4861]: E1003 13:32:19.498814 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-03 13:32:51.498791139 +0000 UTC m=+85.496776186 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 03 13:32:19 crc kubenswrapper[4861]: E1003 13:32:19.498832 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-03 13:32:51.49882529 +0000 UTC m=+85.496810437 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 13:32:19 crc kubenswrapper[4861]: E1003 13:32:19.498844 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. 
No retries permitted until 2025-10-03 13:32:51.49883769 +0000 UTC m=+85.496822837 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 03 13:32:19 crc kubenswrapper[4861]: I1003 13:32:19.529486 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:19 crc kubenswrapper[4861]: I1003 13:32:19.529747 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:19 crc kubenswrapper[4861]: I1003 13:32:19.529868 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:19 crc kubenswrapper[4861]: I1003 13:32:19.529962 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:19 crc kubenswrapper[4861]: I1003 13:32:19.530020 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:19Z","lastTransitionTime":"2025-10-03T13:32:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:19 crc kubenswrapper[4861]: I1003 13:32:19.631871 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:19 crc kubenswrapper[4861]: I1003 13:32:19.632117 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:19 crc kubenswrapper[4861]: I1003 13:32:19.632202 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:19 crc kubenswrapper[4861]: I1003 13:32:19.632362 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:19 crc kubenswrapper[4861]: I1003 13:32:19.632440 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:19Z","lastTransitionTime":"2025-10-03T13:32:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:19 crc kubenswrapper[4861]: I1003 13:32:19.681067 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 13:32:19 crc kubenswrapper[4861]: E1003 13:32:19.681481 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 13:32:19 crc kubenswrapper[4861]: I1003 13:32:19.681512 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 13:32:19 crc kubenswrapper[4861]: I1003 13:32:19.681532 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 13:32:19 crc kubenswrapper[4861]: E1003 13:32:19.681853 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 13:32:19 crc kubenswrapper[4861]: E1003 13:32:19.681934 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 13:32:19 crc kubenswrapper[4861]: I1003 13:32:19.682090 4861 scope.go:117] "RemoveContainer" containerID="98cc8bd6dcdd5a486b40084c8a564a82868e13805b4c65ddaf39f5c3fe11266b" Oct 03 13:32:19 crc kubenswrapper[4861]: I1003 13:32:19.734986 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:19 crc kubenswrapper[4861]: I1003 13:32:19.735034 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:19 crc kubenswrapper[4861]: I1003 13:32:19.735048 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:19 crc kubenswrapper[4861]: I1003 13:32:19.735066 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:19 crc kubenswrapper[4861]: I1003 13:32:19.735078 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:19Z","lastTransitionTime":"2025-10-03T13:32:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:19 crc kubenswrapper[4861]: I1003 13:32:19.837874 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:19 crc kubenswrapper[4861]: I1003 13:32:19.837915 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:19 crc kubenswrapper[4861]: I1003 13:32:19.837928 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:19 crc kubenswrapper[4861]: I1003 13:32:19.837943 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:19 crc kubenswrapper[4861]: I1003 13:32:19.837952 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:19Z","lastTransitionTime":"2025-10-03T13:32:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:19 crc kubenswrapper[4861]: I1003 13:32:19.940014 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:19 crc kubenswrapper[4861]: I1003 13:32:19.940043 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:19 crc kubenswrapper[4861]: I1003 13:32:19.940051 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:19 crc kubenswrapper[4861]: I1003 13:32:19.940067 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:19 crc kubenswrapper[4861]: I1003 13:32:19.940077 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:19Z","lastTransitionTime":"2025-10-03T13:32:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:19 crc kubenswrapper[4861]: I1003 13:32:19.996098 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5twn4_66ed4999-426b-4615-bfb3-764a3ecc950f/ovnkube-controller/1.log" Oct 03 13:32:19 crc kubenswrapper[4861]: I1003 13:32:19.999041 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" event={"ID":"66ed4999-426b-4615-bfb3-764a3ecc950f","Type":"ContainerStarted","Data":"efb31a65c06544254430413ae43161716fc307b8a95a6f42b0e5a085136f832a"} Oct 03 13:32:19 crc kubenswrapper[4861]: I1003 13:32:19.999447 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" Oct 03 13:32:20 crc kubenswrapper[4861]: I1003 13:32:20.011809 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:20Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:20 crc kubenswrapper[4861]: I1003 13:32:20.024017 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f330284d8d5446236d2bf739c6df75969c865e304c5adab6b1ec40a92baf30d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63b55320840755d0a9c8296d19550ae1d7f5cb2f17d286dddc10a0202963bd98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:20Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:20 crc kubenswrapper[4861]: I1003 13:32:20.038461 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jwgvx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f714b7db-082f-4c2c-8239-ba5df6986c13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://163f5cadc8f9cf8082434639e5dd0dfae5cefc359dbf462b616e4dde476a309f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc
/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b76qk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jwgvx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:20Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:20 crc kubenswrapper[4861]: I1003 13:32:20.043108 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:20 crc kubenswrapper[4861]: I1003 13:32:20.043148 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:20 crc kubenswrapper[4861]: I1003 13:32:20.043159 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:20 crc kubenswrapper[4861]: I1003 13:32:20.043174 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:20 crc kubenswrapper[4861]: I1003 13:32:20.043185 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:20Z","lastTransitionTime":"2025-10-03T13:32:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:20 crc kubenswrapper[4861]: I1003 13:32:20.051287 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d8335d3f-417e-4114-b306-a3d8f6c31348\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c79706d97ac0c9214aee8c49206bfb27e579a82781b63cf07bd7b9dc43077402\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7prvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://871a1c47b73846e3f28db33691e75b5ed73af7287e81dae4cf2134fd827614b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7prvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-t9slw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:20Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:20 crc kubenswrapper[4861]: I1003 13:32:20.062994 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hw4vl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9671621a-5831-4fc4-8508-08b284d1cf88\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5cb2b6048f454a138caea8eca4a26c6e5cd219c9d124f46cfa69c168150b6ee4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:32:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sv2d7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53e87e91f53c3b754f21de857a8712b51063c580785de43d0c0e89f47185c549\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sv2d7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:32:
01Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hw4vl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:20Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:20 crc kubenswrapper[4861]: I1003 13:32:20.078629 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wm76s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3bf3157b-44d1-4bb3-b185-71523a80c054\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8fea8b7da57798521c13d0f35905e5311cd0d8016aa20c37cc0d73c8d6fbc1a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b41a8d9dd0f504f505f1d49f5b7fa39d025f0bc4c49b19ef61691a41bd162b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b41a8d9dd0f504f505f1d49f5b7fa39d025f0bc4c49b19ef61691a41bd162b0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name
\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad87b94fe313e12f4f857a69a0a01f6f34877ded4103f2de3015b588522d9904\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad87b94fe313e12f4f857a69a0a01f6f34877ded4103f2de3015b588522d9904\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4343077b8251a65be24e9269fa38bd48d628581ef65b43f7fa262286df7b677d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4343077b8251a65be24e9269fa38bd48d628581ef65b43f7fa262286df7b677d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d10915a62560e9a9e386d4029ccd1c6cd9c99209ab32653437c32a6e5cf7e98d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\
\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d10915a62560e9a9e386d4029ccd1c6cd9c99209ab32653437c32a6e5cf7e98d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cd99ac90e9cf617307233b5899d9da44b563bd5a0969e0a64c4073ee0122b63\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cd99ac90e9cf617307233b5899d9da44b563bd5a0969e0a64c4073ee0122b63\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b20088a595b5a59d1a0339827c7dd169c479a530aed875ac3eeb021b78269490\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b20088a595b5a59d1a0339827c7dd169c479a530aed875ac3eeb021b78269490\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wm76s\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:20Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:20 crc kubenswrapper[4861]: I1003 13:32:20.090172 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-c97s6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e1217d91-8c47-4353-b363-96c9de2cdb56\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70a4ac69623752a83655a58cf44ef00fbf88b0321bc83721fbbe16ea746699c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6zdw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-c97s6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:20Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:20 crc kubenswrapper[4861]: I1003 13:32:20.102362 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"587ecce6-1ef4-4f74-a2ba-bd6e9fdb84dc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d77b51532e1ed4922634cbfc9360ac49276104c2c3ca115ea522ff423cd7bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://853fb69edcd3e4a27929ab2a6081c40f93553967619663805afb7b626f9c1e39\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75079f3e07d277ab11585e34fc72877ba93a8d0aeaa3f0c8bb214c7c14f9c1b1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfcce0420b92a42aaae0ae0e6aa26b655cd97f6ce5d45b671bbf394217027023\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cd18d89b8b81faa6f8232c33ecaec19aafc3d9574a090cbef37bcf26f83a4bf9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 13:31:47.746138 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 13:31:47.746280 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 13:31:47.747035 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2119377140/tls.crt::/tmp/serving-cert-2119377140/tls.key\\\\\\\"\\\\nI1003 13:31:48.538827 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 13:31:48.544908 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 13:31:48.544935 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 13:31:48.545220 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 13:31:48.545275 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 13:31:48.555911 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1003 13:31:48.555947 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 13:31:48.555953 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 13:31:48.555961 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 13:31:48.555964 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 13:31:48.555968 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 13:31:48.555971 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1003 13:31:48.556259 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1003 13:31:48.559989 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb1af1cb5b66706cd0a0da5a3f6b2c380a771100e61f84ca2c85c28f1878f7f6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bdf52752b6cec17ac3d33b9c94d08302ff9e7ff88ab9c23590134a36b1384af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bdf52752b6cec17ac3d33b9c94d08302ff9e7ff88ab9c23590134a36b1384af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:20Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:20 crc kubenswrapper[4861]: I1003 13:32:20.114454 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6d0e9cdc-883e-4b67-afb2-2ef5f4b3246d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01eb05b088e421c220145fd833922351aeba4a520944c6b707039785e26ef303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb9bcc84067a58db80e3c7e1b23825baeaff91f97351e9ada3765b6589fda35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80131782150ecbeb45ec2f55e86909b3735ec4f0b09e27e31f6dfc24d6d4ccd7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a40838be9fe69f9bebecff82c9f10b4c00e167b7f927682e6b18ff490bd10ad4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:20Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:20 crc kubenswrapper[4861]: I1003 13:32:20.124957 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-n974h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"25670d98-45f4-4308-9576-f6f532c422ec\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d9b2d8fb10bb6dc17ca3b4826e1e4b7e8e562e8c8745605cd332268197166b04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2cs7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase
\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:56Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-n974h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:20Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:20 crc kubenswrapper[4861]: I1003 13:32:20.137900 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-cft42" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"267128bb-f8b8-4d69-99a3-ba3af795218c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shz7z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shz7z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:32:02Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-cft42\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:20Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:20 crc kubenswrapper[4861]: I1003 13:32:20.145919 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:20 crc kubenswrapper[4861]: I1003 13:32:20.145954 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:20 crc kubenswrapper[4861]: I1003 13:32:20.145970 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:20 crc kubenswrapper[4861]: I1003 13:32:20.145984 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:20 crc kubenswrapper[4861]: I1003 13:32:20.146020 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:20Z","lastTransitionTime":"2025-10-03T13:32:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:20 crc kubenswrapper[4861]: I1003 13:32:20.152430 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://876e806fe7d7313a700bd557fe86fe469146eeb63ecd75684c558d04f6ef5862\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:20Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:20 crc kubenswrapper[4861]: I1003 13:32:20.171396 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66ed4999-426b-4615-bfb3-764a3ecc950f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a6dd78f8d0e99d19d90df2672bd0a66e48195ab147e3821b110c5b9b13fff935\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d7403a686a403bd13b8c7040a8d54e47ea882e532dbde51ff960cf2b4a7dc84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kuber
netes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dadd381cc9fb8f216611723c7f3113272fdd37e424ab087ae2b516b1282c724\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c11d2168c2a8a146f93a9048c50a0a7da936f36039a924e2e0c946f571ac6d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://17d9e0e142062e93034c5f825e1229664112d38443d5843713cac6e077737c48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a49ed8048fd561e10cc87dfa9b39d3ff2123f2cc65f9b4402bba6bf01d161213\\\",\\\"image
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://efb31a65c06544254430413ae43161716fc307b8a95a6f42b0e5a085136f832a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://98cc8bd6dcdd5a486b40084c8a564a82868e13805b4c65ddaf39f5c3fe11266b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T13:32:02Z\\\",\\\"message\\\":\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-console/downloads\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.213\\\\\\\", Port:80, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nF1003 13:32:01.716595 6227 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:01Z is after 
2025-08-24T17:21:41Z]\\\\nI100\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T13:32:01Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:32:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2ee45e3f91ddde95e7bdf26aed6afb1d69eb3dbbdad136c66a51a2a3a325984\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initCon
tainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85920cc6705b475966ceb17c3776353ec6cd31730aab339b4e5469034c49264e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://85920cc6705b475966ceb17c3776353ec6cd31730aab339b4e5469034c49264e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5twn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:20Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:20 crc kubenswrapper[4861]: I1003 13:32:20.186846 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:20Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:20 crc kubenswrapper[4861]: I1003 13:32:20.200056 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ceeea9a8c61928b935a6c01f2dda3f9bf0036c2c2792c9338cc580a3296285b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:20Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:20 crc kubenswrapper[4861]: I1003 13:32:20.212043 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:20Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:20 crc kubenswrapper[4861]: I1003 13:32:20.248762 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:20 crc kubenswrapper[4861]: I1003 13:32:20.249042 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:20 crc kubenswrapper[4861]: I1003 13:32:20.249111 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:20 crc kubenswrapper[4861]: I1003 13:32:20.249184 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:20 crc kubenswrapper[4861]: I1003 13:32:20.249266 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:20Z","lastTransitionTime":"2025-10-03T13:32:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Oct 03 13:32:20 crc kubenswrapper[4861]: I1003 13:32:20.352712 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:20 crc kubenswrapper[4861]: I1003 13:32:20.352793 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:20 crc kubenswrapper[4861]: I1003 13:32:20.352809 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:20 crc kubenswrapper[4861]: I1003 13:32:20.352828 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:20 crc kubenswrapper[4861]: I1003 13:32:20.352844 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:20Z","lastTransitionTime":"2025-10-03T13:32:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:20 crc kubenswrapper[4861]: I1003 13:32:20.455930 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:20 crc kubenswrapper[4861]: I1003 13:32:20.455982 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:20 crc kubenswrapper[4861]: I1003 13:32:20.455994 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:20 crc kubenswrapper[4861]: I1003 13:32:20.456011 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:20 crc kubenswrapper[4861]: I1003 13:32:20.456024 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:20Z","lastTransitionTime":"2025-10-03T13:32:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:20 crc kubenswrapper[4861]: I1003 13:32:20.558216 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:20 crc kubenswrapper[4861]: I1003 13:32:20.558262 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:20 crc kubenswrapper[4861]: I1003 13:32:20.558270 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:20 crc kubenswrapper[4861]: I1003 13:32:20.558282 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:20 crc kubenswrapper[4861]: I1003 13:32:20.558291 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:20Z","lastTransitionTime":"2025-10-03T13:32:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:20 crc kubenswrapper[4861]: I1003 13:32:20.660940 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:20 crc kubenswrapper[4861]: I1003 13:32:20.660988 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:20 crc kubenswrapper[4861]: I1003 13:32:20.660999 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:20 crc kubenswrapper[4861]: I1003 13:32:20.661016 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:20 crc kubenswrapper[4861]: I1003 13:32:20.661027 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:20Z","lastTransitionTime":"2025-10-03T13:32:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:20 crc kubenswrapper[4861]: I1003 13:32:20.680616 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cft42"
Oct 03 13:32:20 crc kubenswrapper[4861]: E1003 13:32:20.680764 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-cft42" podUID="267128bb-f8b8-4d69-99a3-ba3af795218c"
Oct 03 13:32:20 crc kubenswrapper[4861]: I1003 13:32:20.784365 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:20 crc kubenswrapper[4861]: I1003 13:32:20.784406 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:20 crc kubenswrapper[4861]: I1003 13:32:20.784420 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:20 crc kubenswrapper[4861]: I1003 13:32:20.784435 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:20 crc kubenswrapper[4861]: I1003 13:32:20.784449 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:20Z","lastTransitionTime":"2025-10-03T13:32:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:20 crc kubenswrapper[4861]: I1003 13:32:20.886759 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:20 crc kubenswrapper[4861]: I1003 13:32:20.886791 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:20 crc kubenswrapper[4861]: I1003 13:32:20.886800 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:20 crc kubenswrapper[4861]: I1003 13:32:20.886814 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:20 crc kubenswrapper[4861]: I1003 13:32:20.886823 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:20Z","lastTransitionTime":"2025-10-03T13:32:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:20 crc kubenswrapper[4861]: I1003 13:32:20.988888 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:20 crc kubenswrapper[4861]: I1003 13:32:20.988917 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:20 crc kubenswrapper[4861]: I1003 13:32:20.988925 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:20 crc kubenswrapper[4861]: I1003 13:32:20.988938 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:20 crc kubenswrapper[4861]: I1003 13:32:20.988947 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:20Z","lastTransitionTime":"2025-10-03T13:32:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:21 crc kubenswrapper[4861]: I1003 13:32:21.003079 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5twn4_66ed4999-426b-4615-bfb3-764a3ecc950f/ovnkube-controller/2.log"
Oct 03 13:32:21 crc kubenswrapper[4861]: I1003 13:32:21.003550 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5twn4_66ed4999-426b-4615-bfb3-764a3ecc950f/ovnkube-controller/1.log"
Oct 03 13:32:21 crc kubenswrapper[4861]: I1003 13:32:21.005868 4861 generic.go:334] "Generic (PLEG): container finished" podID="66ed4999-426b-4615-bfb3-764a3ecc950f" containerID="efb31a65c06544254430413ae43161716fc307b8a95a6f42b0e5a085136f832a" exitCode=1
Oct 03 13:32:21 crc kubenswrapper[4861]: I1003 13:32:21.005940 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" event={"ID":"66ed4999-426b-4615-bfb3-764a3ecc950f","Type":"ContainerDied","Data":"efb31a65c06544254430413ae43161716fc307b8a95a6f42b0e5a085136f832a"}
Oct 03 13:32:21 crc kubenswrapper[4861]: I1003 13:32:21.005977 4861 scope.go:117] "RemoveContainer" containerID="98cc8bd6dcdd5a486b40084c8a564a82868e13805b4c65ddaf39f5c3fe11266b"
Oct 03 13:32:21 crc kubenswrapper[4861]: I1003 13:32:21.006696 4861 scope.go:117] "RemoveContainer" containerID="efb31a65c06544254430413ae43161716fc307b8a95a6f42b0e5a085136f832a"
Oct 03 13:32:21 crc kubenswrapper[4861]: E1003 13:32:21.006846 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-5twn4_openshift-ovn-kubernetes(66ed4999-426b-4615-bfb3-764a3ecc950f)\"" pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" podUID="66ed4999-426b-4615-bfb3-764a3ecc950f"
Oct 03 13:32:21 crc kubenswrapper[4861]: I1003 13:32:21.023300 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-n974h" err="failed to patch status
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"25670d98-45f4-4308-9576-f6f532c422ec\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d9b2d8fb10bb6dc17ca3b4826e1e4b7e8e562e8c8745605cd332268197166b04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2cs7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:56Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-n974h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:21Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:21 crc kubenswrapper[4861]: I1003 13:32:21.034390 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-cft42" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"267128bb-f8b8-4d69-99a3-ba3af795218c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shz7z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shz7z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:32:02Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-cft42\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:21Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:21 crc kubenswrapper[4861]: I1003 13:32:21.048770 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"587ecce6-1ef4-4f74-a2ba-bd6e9fdb84dc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d77b51532e1ed4922634cbfc9360ac49276104c2c3ca115ea522ff423cd7bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://853fb69edcd3e4a27929ab2a6081c40f93553967619663805afb7b626f9c1e39\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75079f3e07d277ab11585e34fc72877ba93a8d0aeaa3f0c8bb214c7c14f9c1b1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfcce0420b92a42aaae0ae0e6aa26b655cd97f6ce5d45b671bbf394217027023\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cd18d89b8b81faa6f8232c33ecaec19aafc3d9574a090cbef37bcf26f83a4bf9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 13:31:47.746138 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 13:31:47.746280 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 13:31:47.747035 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2119377140/tls.crt::/tmp/serving-cert-2119377140/tls.key\\\\\\\"\\\\nI1003 13:31:48.538827 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 13:31:48.544908 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 13:31:48.544935 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 13:31:48.545220 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 13:31:48.545275 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 13:31:48.555911 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1003 13:31:48.555947 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 13:31:48.555953 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 13:31:48.555961 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 13:31:48.555964 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 13:31:48.555968 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 13:31:48.555971 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1003 13:31:48.556259 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1003 13:31:48.559989 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb1af1cb5b66706cd0a0da5a3f6b2c380a771100e61f84ca2c85c28f1878f7f6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bdf52752b6cec17ac3d33b9c94d08302ff9e7ff88ab9c23590134a36b1384af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bdf52752b6cec17ac3d33b9c94d08302ff9e7ff88ab9c23590134a36b1384af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:21Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:21 crc kubenswrapper[4861]: I1003 13:32:21.062173 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6d0e9cdc-883e-4b67-afb2-2ef5f4b3246d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01eb05b088e421c220145fd833922351aeba4a520944c6b707039785e26ef303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb9bcc84067a58db80e3c7e1b23825baeaff91f97351e9ada3765b6589fda35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80131782150ecbeb45ec2f55e86909b3735ec4f0b09e27e31f6dfc24d6d4ccd7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a40838be9fe69f9bebecff82c9f10b4c00e167b7f927682e6b18ff490bd10ad4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:21Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:21 crc kubenswrapper[4861]: I1003 13:32:21.074091 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:21Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:21 crc kubenswrapper[4861]: I1003 13:32:21.088879 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ceeea9a8c61928b935a6c01f2dda3f9bf0036c2c2792c9338cc580a3296285b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:21Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:21 crc kubenswrapper[4861]: I1003 13:32:21.090831 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:21 crc kubenswrapper[4861]: I1003 13:32:21.090863 4861 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:21 crc kubenswrapper[4861]: I1003 13:32:21.090874 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:21 crc kubenswrapper[4861]: I1003 13:32:21.090889 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:21 crc kubenswrapper[4861]: I1003 13:32:21.090899 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:21Z","lastTransitionTime":"2025-10-03T13:32:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:21 crc kubenswrapper[4861]: I1003 13:32:21.101101 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:21Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:21 crc kubenswrapper[4861]: I1003 13:32:21.115059 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://876e806fe7d7313a700bd557fe86fe469146eeb63ecd75684c558d04f6ef5862\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:21Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:21 crc kubenswrapper[4861]: I1003 13:32:21.133612 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" err="failed to 
patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66ed4999-426b-4615-bfb3-764a3ecc950f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a6dd78f8d0e99d19d90df2672bd0a66e48195ab147e3821b110c5b9b13fff935\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d7403a686a403bd13b8c7040a8d54e47ea882e532dbde51ff960cf2b4a7dc84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dadd381cc9fb8f216611723c7f3113272fdd37e424ab087ae2b516b1282c724\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\
"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c11d2168c2a8a146f93a9048c50a0a7da936f36039a924e2e0c946f571ac6d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://17d9e0e142062e93034c5f825e1229664112d38443d5843713cac6e077737c48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a49ed8048fd561e10cc87dfa9b39d3ff2123f2cc65f9b4402bba6bf01d161213\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started
\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://efb31a65c06544254430413ae43161716fc307b8a95a6f42b0e5a085136f832a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://98cc8bd6dcdd5a486b40084c8a564a82868e13805b4c65ddaf39f5c3fe11266b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T13:32:02Z\\\",\\\"message\\\":\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-console/downloads\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.213\\\\\\\", Port:80, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nF1003 13:32:01.716595 6227 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:01Z is after 2025-08-24T17:21:41Z]\\\\nI100\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T13:32:01Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://efb31a65c06544254430413ae43161716fc307b8a95a6f42b0e5a085136f832a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T13:32:20Z\\\",\\\"message\\\":\\\"ork=default : 7.45µs\\\\nI1003 
13:32:20.899156 6445 services_controller.go:356] Processing sync for service openshift-cluster-machine-approver/machine-approver for network=default\\\\nI1003 13:32:20.899162 6445 services_controller.go:360] Finished syncing service machine-approver on namespace openshift-cluster-machine-approver for network=default : 5.081µs\\\\nI1003 13:32:20.899168 6445 services_controller.go:356] Processing sync for service openshift-network-operator/metrics for network=default\\\\nI1003 13:32:20.899173 6445 services_controller.go:360] Finished syncing service metrics on namespace openshift-network-operator for network=default : 4.6µs\\\\nI1003 13:32:20.899147 6445 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-route-controller-manager/route-controller-manager_TCP_cluster\\\\\\\", UUID:\\\\\\\"18746a4d-8a63-458a-b7e3-8fb89ff95fc0\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-route-controller-manager/route-controller-manager\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T13:32:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\
\\"containerID\\\":\\\"cri-o://f2ee45e3f91ddde95e7bdf26aed6afb1d69eb3dbbdad136c66a51a2a3a325984\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85920cc6705b475966ceb17c3776353ec6cd31730aab339b4e5469034c49264e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://85920cc6705b475966ceb17c3776353ec6cd31730aab339b4e5469034c49264e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5twn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:21Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:21 crc kubenswrapper[4861]: I1003 13:32:21.146465 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jwgvx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f714b7db-082f-4c2c-8239-ba5df6986c13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://163f5cadc8f9cf8082434639e5dd0dfae5cefc359dbf462b616e4dde476a309f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b76qk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jwgvx\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:21Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:21 crc kubenswrapper[4861]: I1003 13:32:21.157436 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d8335d3f-417e-4114-b306-a3d8f6c31348\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c79706d97ac0c9214aee8c49206bfb27e579a82781b63cf07bd7b9dc43077402\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7prvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://871a1c47b73846e3f28db33691e75b5ed73af7287e81dae4cf2134fd827614b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7prvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-t9slw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:21Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:21 crc kubenswrapper[4861]: I1003 13:32:21.168818 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hw4vl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9671621a-5831-4fc4-8508-08b284d1cf88\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5cb2b6048f454a138caea8eca4a26c6e5cd219c9d124f46cfa69c168150b6ee4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:32:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sv2d7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53e87e91f53c3b754f21de857a8712b51063c580785de43d0c0e89f47185c549\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sv2d7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\
"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:32:01Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hw4vl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:21Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:21 crc kubenswrapper[4861]: I1003 13:32:21.182501 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:21Z is after 2025-08-24T17:21:41Z"
Oct 03 13:32:21 crc kubenswrapper[4861]: I1003 13:32:21.193444 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:21 crc kubenswrapper[4861]: I1003 13:32:21.193484 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:21 crc kubenswrapper[4861]: I1003 13:32:21.193493 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:21 crc kubenswrapper[4861]: I1003 13:32:21.193515 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:21 crc kubenswrapper[4861]: I1003 13:32:21.193525 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:21Z","lastTransitionTime":"2025-10-03T13:32:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"} Oct 03 13:32:21 crc kubenswrapper[4861]: I1003 13:32:21.194965 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f330284d8d5446236d2bf739c6df75969c865e304c5adab6b1ec40a92baf30d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63b55320840755d0a9c8296d19550ae1d7f5cb2f17d286dddc10a0202963bd98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:21Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:21 crc kubenswrapper[4861]: I1003 13:32:21.203988 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-c97s6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e1217d91-8c47-4353-b363-96c9de2cdb56\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70a4ac69623752a83655a58cf44ef00fbf88b0321bc83721fbbe16ea746699c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6zdw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-c97s6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:21Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:21 crc kubenswrapper[4861]: I1003 13:32:21.216874 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wm76s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3bf3157b-44d1-4bb3-b185-71523a80c054\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8fea8b7da57798521c13d0f35905e5311cd0d8016aa20c37cc0d73c8d6fbc1a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b41a8d9dd0f504f505f1d49f5b7fa39d025f0bc4c49b19ef61691a41bd162b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b41a8d9dd0f504f505f1d49f5b7fa39d025f0bc4c49b19ef61691a41bd162b0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad87b94fe313e12f4f857a69a0a01f6f34877ded4103f2de3015b588522d9904\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad87b94fe313e12f4f857a69a0a01f6f34877ded4103f2de3015b588522d9904\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4343077b8251a65be24e9269fa38bd48d628581ef65b43f7fa262286df7b677d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4343077b8251a65be24e9269fa38bd48d628581ef65b43f7fa262286df7b677d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d10915a62560e9a9e386d4029ccd1c6cd9c99209ab32653437c32a6e5cf7e98d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d10915a62560e9a9e386d4029ccd1c6cd9c99209ab32653437c32a6e5cf7e98d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cd99ac90e9cf617307233b5899d9da44b563bd5a0969e0a64c4073ee0122b63\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cd99ac90e9cf617307233b5899d9da44b563bd5a0969e0a64c4073ee0122b63\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b20088a595b5a59d1a0339827c7dd169c479a530aed875ac3eeb021b78269490\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b20088a595b5a59d1a0339827c7dd169c479a530aed875ac3eeb021b78269490\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wm76s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:21Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:21 crc kubenswrapper[4861]: I1003 13:32:21.296275 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:21 crc kubenswrapper[4861]: I1003 13:32:21.296304 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:21 crc 
kubenswrapper[4861]: I1003 13:32:21.296315 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:21 crc kubenswrapper[4861]: I1003 13:32:21.296331 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:21 crc kubenswrapper[4861]: I1003 13:32:21.296341 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:21Z","lastTransitionTime":"2025-10-03T13:32:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:21 crc kubenswrapper[4861]: I1003 13:32:21.399525 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:21 crc kubenswrapper[4861]: I1003 13:32:21.399608 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:21 crc kubenswrapper[4861]: I1003 13:32:21.399622 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:21 crc kubenswrapper[4861]: I1003 13:32:21.399644 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:21 crc kubenswrapper[4861]: I1003 13:32:21.399659 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:21Z","lastTransitionTime":"2025-10-03T13:32:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:21 crc kubenswrapper[4861]: I1003 13:32:21.502379 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:21 crc kubenswrapper[4861]: I1003 13:32:21.502446 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:21 crc kubenswrapper[4861]: I1003 13:32:21.502462 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:21 crc kubenswrapper[4861]: I1003 13:32:21.502483 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:21 crc kubenswrapper[4861]: I1003 13:32:21.502495 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:21Z","lastTransitionTime":"2025-10-03T13:32:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
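Note: the repeated "Node became not ready" entries above all carry the same root cause: the runtime-network readiness check finds no CNI configuration file in /etc/kubernetes/cni/net.d/. At its core that check is a scan of the conf directory for *.conf, *.conflist, or *.json files; below is a minimal, stdlib-only Go sketch of the idea (simplified — the real logic lives in libcni and also validates file contents).

package main

import (
	"fmt"
	"os"
	"path/filepath"
)

// hasCNIConfig reports whether dir contains at least one CNI network
// configuration file, mirroring the check behind the NetworkReady=false
// message in the log (simplified sketch, not the kubelet's actual code).
func hasCNIConfig(dir string) (bool, error) {
	entries, err := os.ReadDir(dir)
	if err != nil {
		return false, err
	}
	for _, e := range entries {
		if e.IsDir() {
			continue
		}
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json":
			return true, nil
		}
	}
	return false, nil
}

func main() {
	ok, err := hasCNIConfig("/etc/kubernetes/cni/net.d")
	if err != nil {
		fmt.Println("cannot read config dir:", err)
		return
	}
	if !ok {
		fmt.Println("no CNI configuration file found: network plugin not ready")
	}
}

Until that directory gains a valid config (here, until ovnkube-node writes one), the node's Ready condition stays False, which is exactly the flapping recorded above.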
Oct 03 13:32:21 crc kubenswrapper[4861]: I1003 13:32:21.605992 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:21 crc kubenswrapper[4861]: I1003 13:32:21.606073 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:21 crc kubenswrapper[4861]: I1003 13:32:21.606097 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:21 crc kubenswrapper[4861]: I1003 13:32:21.606129 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:21 crc kubenswrapper[4861]: I1003 13:32:21.606152 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:21Z","lastTransitionTime":"2025-10-03T13:32:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:21 crc kubenswrapper[4861]: I1003 13:32:21.680123 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 03 13:32:21 crc kubenswrapper[4861]: I1003 13:32:21.680167 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 03 13:32:21 crc kubenswrapper[4861]: I1003 13:32:21.680288 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 03 13:32:21 crc kubenswrapper[4861]: E1003 13:32:21.680304 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 03 13:32:21 crc kubenswrapper[4861]: E1003 13:32:21.680434 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 03 13:32:21 crc kubenswrapper[4861]: E1003 13:32:21.680528 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
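Note: the "failed to patch status" entries throughout this log quote the exact patch body the kubelet status manager tried to send. The $setElementOrder/conditions directive marks these as strategic merge patches: the conditions list merges on its "type" key instead of being replaced wholesale. A sketch of how such a patch can be produced with the apimachinery helper follows (illustrative pod values; assumes the k8s.io/api and k8s.io/apimachinery modules are on the module path — not the kubelet's own code).

package main

import (
	"encoding/json"
	"fmt"

	corev1 "k8s.io/api/core/v1"
	"k8s.io/apimachinery/pkg/util/strategicpatch"
)

func main() {
	// Old status: pod not ready. New status: pod ready.
	oldPod := corev1.Pod{Status: corev1.PodStatus{
		Conditions: []corev1.PodCondition{{Type: corev1.PodReady, Status: corev1.ConditionFalse}},
	}}
	newPod := oldPod
	newPod.Status.Conditions = []corev1.PodCondition{{Type: corev1.PodReady, Status: corev1.ConditionTrue}}

	oldJSON, _ := json.Marshal(oldPod)
	newJSON, _ := json.Marshal(newPod)

	// Diffing against the Pod schema is what yields directives like
	// $setElementOrder/conditions seen in the log, because the conditions
	// list carries a patch merge key ("type").
	patch, err := strategicpatch.CreateTwoWayMergePatch(oldJSON, newJSON, corev1.Pod{})
	if err != nil {
		panic(err)
	}
	fmt.Println(string(patch))
}

In this log the patches themselves are well-formed; they are rejected server-side because the apiserver cannot reach the pod admission webhook, as the trailing error on every entry shows.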
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 13:32:21 crc kubenswrapper[4861]: I1003 13:32:21.708823 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:21 crc kubenswrapper[4861]: I1003 13:32:21.709194 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:21 crc kubenswrapper[4861]: I1003 13:32:21.709206 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:21 crc kubenswrapper[4861]: I1003 13:32:21.709245 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:21 crc kubenswrapper[4861]: I1003 13:32:21.709258 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:21Z","lastTransitionTime":"2025-10-03T13:32:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:21 crc kubenswrapper[4861]: I1003 13:32:21.811005 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:21 crc kubenswrapper[4861]: I1003 13:32:21.811046 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:21 crc kubenswrapper[4861]: I1003 13:32:21.811057 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:21 crc kubenswrapper[4861]: I1003 13:32:21.811073 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:21 crc kubenswrapper[4861]: I1003 13:32:21.811082 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:21Z","lastTransitionTime":"2025-10-03T13:32:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Oct 03 13:32:21 crc kubenswrapper[4861]: I1003 13:32:21.913955 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:21 crc kubenswrapper[4861]: I1003 13:32:21.914034 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:21 crc kubenswrapper[4861]: I1003 13:32:21.914058 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:21 crc kubenswrapper[4861]: I1003 13:32:21.914087 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:21 crc kubenswrapper[4861]: I1003 13:32:21.914110 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:21Z","lastTransitionTime":"2025-10-03T13:32:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:22 crc kubenswrapper[4861]: I1003 13:32:22.011444 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5twn4_66ed4999-426b-4615-bfb3-764a3ecc950f/ovnkube-controller/2.log"
Oct 03 13:32:22 crc kubenswrapper[4861]: I1003 13:32:22.014678 4861 scope.go:117] "RemoveContainer" containerID="efb31a65c06544254430413ae43161716fc307b8a95a6f42b0e5a085136f832a"
Oct 03 13:32:22 crc kubenswrapper[4861]: E1003 13:32:22.014825 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-5twn4_openshift-ovn-kubernetes(66ed4999-426b-4615-bfb3-764a3ecc950f)\"" pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" podUID="66ed4999-426b-4615-bfb3-764a3ecc950f"
Oct 03 13:32:22 crc kubenswrapper[4861]: I1003 13:32:22.015801 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:22 crc kubenswrapper[4861]: I1003 13:32:22.015844 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:22 crc kubenswrapper[4861]: I1003 13:32:22.015854 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:22 crc kubenswrapper[4861]: I1003 13:32:22.015866 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:22 crc kubenswrapper[4861]: I1003 13:32:22.015877 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:22Z","lastTransitionTime":"2025-10-03T13:32:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
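Note: the CrashLoopBackOff entry above ("back-off 20s restarting failed container=ovnkube-controller ...") reflects kubelet's crash-loop backoff, which by the commonly documented defaults starts at 10s and doubles per restart up to a 5m cap, so a 20s delay implies the second consecutive failure. A toy sketch of that schedule (constants assumed from the documented defaults, not read from the kubelet source):

package main

import (
	"fmt"
	"time"
)

func main() {
	const (
		initialDelay = 10 * time.Second // assumed default starting delay
		maxDelay     = 5 * time.Minute  // assumed default cap
	)
	delay := initialDelay
	for failure := 1; failure <= 7; failure++ {
		// failure 1 -> 10s, failure 2 -> 20s (the value in the log), ...
		fmt.Printf("after failure %d: back-off %s\n", failure, delay)
		delay *= 2
		if delay > maxDelay {
			delay = maxDelay
		}
	}
}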
Has your network provider started?"} Oct 03 13:32:22 crc kubenswrapper[4861]: I1003 13:32:22.028935 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wm76s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3bf3157b-44d1-4bb3-b185-71523a80c054\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8fea8b7da57798521c13d0f35905e5311cd0d8016aa20c37cc0d73c8d6fbc1a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b41a8d9dd0f504f505f1d49f5b7fa39d025f0bc4c49b19ef61691a41bd162b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b41a8d9dd0f504f505f1d49f5b7fa39d025f0bc4c49b19ef61691a41bd162b0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad87b94fe313e12f4f857a69a0a01f6f34877ded4103f2de3015b588522d9904\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad87b94fe313e12f4f857a69a0a01f6f34877ded4103f2de3015b588522d9904\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4343077b8251a65be24e9269fa38bd48d628581ef65b43f7fa262286df7b677d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4343077b8251a65be24e9269fa38bd48d628581ef65b43f7fa262286df7b677d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d10915a62560e9a9e386d4029ccd1c6cd9c99209ab32653437c32a6e5cf7e98d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d10915a62560e9a9e386d4029ccd1c6cd9c99209ab32653437c32a6e5cf7e98d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cd99ac90e9cf617307233b5899d9da44b563bd5a0969e0a64c4073ee0122b63\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cd99ac90e9cf617307233b5899d9da44b563bd5a0969e0a64c4073ee0122b63\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b20088a595b5a59d1a0339827c7dd169c479a530aed875ac3eeb021b78269490\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b20088a595b5a59d1a0339827c7dd169c479a530aed875ac3eeb021b78269490\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wm76s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:22Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:22 crc kubenswrapper[4861]: I1003 13:32:22.039723 4861 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-dns/node-resolver-c97s6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e1217d91-8c47-4353-b363-96c9de2cdb56\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70a4ac69623752a83655a58cf44ef00fbf88b0321bc83721fbbe16ea746699c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6zdw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-c97s6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:22Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:22 crc kubenswrapper[4861]: I1003 13:32:22.055468 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"587ecce6-1ef4-4f74-a2ba-bd6e9fdb84dc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d77b51532e1ed4922634cbfc9360ac49276104c2c3ca115ea522ff423cd7bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://853fb69edcd3e4a27929ab2a6081c40f93553967619663805afb7b626f9c1e39\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75079f3e07d277ab11585e34fc72877ba93a8d0aeaa3f0c8bb214c7c14f9c1b1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfcce0420b92a42aaae0ae0e6aa26b655cd97f6ce5d45b671bbf394217027023\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cd18d89b8b81faa6f8232c33ecaec19aafc3d9574a090cbef37bcf26f83a4bf9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 13:31:47.746138 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 13:31:47.746280 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 13:31:47.747035 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2119377140/tls.crt::/tmp/serving-cert-2119377140/tls.key\\\\\\\"\\\\nI1003 13:31:48.538827 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 13:31:48.544908 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 13:31:48.544935 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 13:31:48.545220 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 13:31:48.545275 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 13:31:48.555911 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1003 13:31:48.555947 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 13:31:48.555953 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 13:31:48.555961 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 13:31:48.555964 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 13:31:48.555968 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 13:31:48.555971 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1003 13:31:48.556259 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1003 13:31:48.559989 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb1af1cb5b66706cd0a0da5a3f6b2c380a771100e61f84ca2c85c28f1878f7f6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bdf52752b6cec17ac3d33b9c94d08302ff9e7ff88ab9c23590134a36b1384af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bdf52752b6cec17ac3d33b9c94d08302ff9e7ff88ab9c23590134a36b1384af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:22Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:22 crc kubenswrapper[4861]: I1003 13:32:22.066979 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6d0e9cdc-883e-4b67-afb2-2ef5f4b3246d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01eb05b088e421c220145fd833922351aeba4a520944c6b707039785e26ef303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb9bcc84067a58db80e3c7e1b23825baeaff91f97351e9ada3765b6589fda35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80131782150ecbeb45ec2f55e86909b3735ec4f0b09e27e31f6dfc24d6d4ccd7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a40838be9fe69f9bebecff82c9f10b4c00e167b7f927682e6b18ff490bd10ad4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:22Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:22 crc kubenswrapper[4861]: I1003 13:32:22.077377 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-n974h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"25670d98-45f4-4308-9576-f6f532c422ec\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d9b2d8fb10bb6dc17ca3b4826e1e4b7e8e562e8c8745605cd332268197166b04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2cs7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase
\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:56Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-n974h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:22Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:22 crc kubenswrapper[4861]: I1003 13:32:22.079349 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 03 13:32:22 crc kubenswrapper[4861]: I1003 13:32:22.090590 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Oct 03 13:32:22 crc kubenswrapper[4861]: I1003 13:32:22.090989 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-cft42" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"267128bb-f8b8-4d69-99a3-ba3af795218c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shz7z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shz7z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:32:02Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-cft42\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:22Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:22 crc kubenswrapper[4861]: I1003 13:32:22.104567 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:22Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:22 crc kubenswrapper[4861]: I1003 13:32:22.118211 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://876e806fe7d7313a700bd557fe86fe469146eeb63ecd75684c558d04f6ef5862\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:22Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:22 crc kubenswrapper[4861]: I1003 13:32:22.118641 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:22 
crc kubenswrapper[4861]: I1003 13:32:22.118682 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:22 crc kubenswrapper[4861]: I1003 13:32:22.118697 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:22 crc kubenswrapper[4861]: I1003 13:32:22.118717 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:22 crc kubenswrapper[4861]: I1003 13:32:22.118728 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:22Z","lastTransitionTime":"2025-10-03T13:32:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:22 crc kubenswrapper[4861]: I1003 13:32:22.140196 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66ed4999-426b-4615-bfb3-764a3ecc950f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a6dd78f8d0e99d19d90df2672bd0a66e48195ab147e3821b110c5b9b13fff935\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d7403a686a403bd13b8c7040a8d54e47ea882e532dbde51ff960cf2b4a7dc84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dadd381cc9fb8f216611723c7f3113272fdd37e424ab087ae2b516b1282c724\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c11d2168c2a8a146f93a9048c50a0a7da936f36039a924e2e0c946f571ac6d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://17d9e0e142062e93034c5f825e1229664112d38443d5843713cac6e077737c48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a49ed8048fd561e10cc87dfa9b39d3ff2123f2cc65f9b4402bba6bf01d161213\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://efb31a65c06544254430413ae43161716fc307b8
a95a6f42b0e5a085136f832a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://efb31a65c06544254430413ae43161716fc307b8a95a6f42b0e5a085136f832a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T13:32:20Z\\\",\\\"message\\\":\\\"ork=default : 7.45µs\\\\nI1003 13:32:20.899156 6445 services_controller.go:356] Processing sync for service openshift-cluster-machine-approver/machine-approver for network=default\\\\nI1003 13:32:20.899162 6445 services_controller.go:360] Finished syncing service machine-approver on namespace openshift-cluster-machine-approver for network=default : 5.081µs\\\\nI1003 13:32:20.899168 6445 services_controller.go:356] Processing sync for service openshift-network-operator/metrics for network=default\\\\nI1003 13:32:20.899173 6445 services_controller.go:360] Finished syncing service metrics on namespace openshift-network-operator for network=default : 4.6µs\\\\nI1003 13:32:20.899147 6445 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-route-controller-manager/route-controller-manager_TCP_cluster\\\\\\\", UUID:\\\\\\\"18746a4d-8a63-458a-b7e3-8fb89ff95fc0\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-route-controller-manager/route-controller-manager\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T13:32:19Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-5twn4_openshift-ovn-kubernetes(66ed4999-426b-4615-bfb3-764a3ecc950f)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2ee45e3f91ddde95e7bdf26aed6afb1d69eb3dbbdad136c66a51a2a3a325984\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85920cc6705b475966ceb17c3776353ec6cd31730aab339b4e5469034c49264e\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://85920cc6705b475966ceb17c3776353ec6cd31730aab339b4e5469034c49264e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5twn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:22Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:22 crc kubenswrapper[4861]: I1003 13:32:22.151499 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:22Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:22 crc kubenswrapper[4861]: I1003 13:32:22.162930 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ceeea9a8c61928b935a6c01f2dda3f9bf0036c2c2792c9338cc580a3296285b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:22Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:22 crc kubenswrapper[4861]: I1003 13:32:22.173914 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hw4vl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9671621a-5831-4fc4-8508-08b284d1cf88\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5cb2b6048f454a138caea8eca4a26c6e5cd219c9d124f46cfa69c168150b6ee4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:32:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sv2d7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53e87e91f53c3b754f21de857a8712b51063c580785de43d0c0e89f47185c549\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sv2d7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:32:01Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hw4vl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:22Z is after 2025-08-24T17:21:41Z" Oct 03 
13:32:22 crc kubenswrapper[4861]: I1003 13:32:22.184635 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:22Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:22 crc kubenswrapper[4861]: I1003 13:32:22.195544 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f330284d8d5446236d2bf739c6df75969c865e304c5adab6b1ec40a92baf30d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63b55320840755d0a9c8296d19550ae1d7f5cb2f17d286dddc10a0202963bd98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:22Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:22 crc kubenswrapper[4861]: I1003 13:32:22.206405 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jwgvx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f714b7db-082f-4c2c-8239-ba5df6986c13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://163f5cadc8f9cf8082434639e5dd0dfae5cefc359dbf462b616e4dde476a309f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b76qk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jwgvx\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:22Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:22 crc kubenswrapper[4861]: I1003 13:32:22.217273 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d8335d3f-417e-4114-b306-a3d8f6c31348\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c79706d97ac0c9214aee8c49206bfb27e579a82781b63cf07bd7b9dc43077402\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7prvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://871a1c47b73846e3f28db33691e75b5ed73af7287e81dae4cf2134fd827614b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7prvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-t9slw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:22Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:22 crc kubenswrapper[4861]: I1003 13:32:22.220466 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:22 crc kubenswrapper[4861]: I1003 13:32:22.220494 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:22 crc kubenswrapper[4861]: I1003 13:32:22.220503 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:22 crc kubenswrapper[4861]: I1003 13:32:22.220518 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:22 crc kubenswrapper[4861]: I1003 13:32:22.220528 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:22Z","lastTransitionTime":"2025-10-03T13:32:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:22 crc kubenswrapper[4861]: I1003 13:32:22.231204 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wm76s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3bf3157b-44d1-4bb3-b185-71523a80c054\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8fea8b7da57798521c13d0f35905e5311cd0d8016aa20c37cc0d73c8d6fbc1a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b41a8d9dd0f504f505f1d49f5b7fa39d025f0bc4c49b19ef61691a41bd162b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b41a8d9dd0f504f505f1d49f5b7fa39d025f0bc4c49b19ef61691a41bd162b0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad87b94fe313e12f4f857a69a0a01f6f34877ded4103f2de3015b588522d9904\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad87b94fe313e12f4f857a69a0a01f6f34877ded4103f2de3015b588522d9904\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4343077b8251a65be24e9269fa38bd48d628581ef65b43f7fa262286df7b677d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4343077b8251a65be24e9269fa38bd48d628581ef65b43f7fa262286df7b677d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d10915a62560e9a9e386d4029ccd1c6cd9c99209ab32653437c32a6e5cf7e98d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d10915a62560e9a9e386d4029ccd1c6cd9c99209ab32653437c32a6e5cf7e98d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cd99ac90e9cf617307233b5899d9da44b563bd5a0969e0a64c4073ee0122b63\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cd99ac90e9cf617307233b5899d9da44b563bd5a0969e0a64c4073ee0122b63\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b20088a595b5a59d1a0339827c7dd169c479a530aed875ac3eeb021b78269490\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b20088a595b5a59d1a0339827c7dd169c479a530aed875ac3eeb021b78269490\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wm76s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:22Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:22 crc kubenswrapper[4861]: I1003 13:32:22.240803 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-c97s6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e1217d91-8c47-4353-b363-96c9de2cdb56\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70a4ac69623752a83655a58cf44ef00fbf88b0321bc83721fbbe16ea746699c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6zdw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-c97s6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:22Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:22 crc kubenswrapper[4861]: I1003 13:32:22.252579 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6d0e9cdc-883e-4b67-afb2-2ef5f4b3246d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01eb05b088e421c220145fd833922351aeba4a520944c6b707039785e26ef303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb9bcc84067a58db80e3c7e1b23825baeaff91f97351e9ada3765b6589fda35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80131782150ecbeb45ec2f55e86909b3735ec4f0b09e27e31f6dfc24d6d4ccd7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a40838be9fe69f9bebecff82c9f10b4c00e167b7f927682e6b18ff490bd10ad4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:22Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:22 crc kubenswrapper[4861]: I1003 13:32:22.265422 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-n974h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"25670d98-45f4-4308-9576-f6f532c422ec\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d9b2d8fb10bb6dc17ca3b4826e1e4b7e8e562e8c8745605cd332268197166b04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2cs7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase
\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:56Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-n974h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:22Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:22 crc kubenswrapper[4861]: I1003 13:32:22.275266 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-cft42" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"267128bb-f8b8-4d69-99a3-ba3af795218c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shz7z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shz7z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:32:02Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-cft42\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:22Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:22 crc kubenswrapper[4861]: I1003 13:32:22.291452 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"587ecce6-1ef4-4f74-a2ba-bd6e9fdb84dc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d77b51532e1ed4922634cbfc9360ac49276104c2c3ca115ea522ff423cd7bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://853fb69edcd3e4a27929ab2a6081c40f93553967619663805afb7b626f9c1e39\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75079f3e07d277ab11585e34fc72877ba93a8d0aeaa3f0c8bb214c7c14f9c1b1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\
\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfcce0420b92a42aaae0ae0e6aa26b655cd97f6ce5d45b671bbf394217027023\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cd18d89b8b81faa6f8232c33ecaec19aafc3d9574a090cbef37bcf26f83a4bf9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 13:31:47.746138 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 13:31:47.746280 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 13:31:47.747035 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2119377140/tls.crt::/tmp/serving-cert-2119377140/tls.key\\\\\\\"\\\\nI1003 13:31:48.538827 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 13:31:48.544908 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 13:31:48.544935 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 13:31:48.545220 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 13:31:48.545275 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 13:31:48.555911 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1003 13:31:48.555947 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 13:31:48.555953 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 13:31:48.555961 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 13:31:48.555964 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 13:31:48.555968 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 13:31:48.555971 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1003 13:31:48.556259 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1003 13:31:48.559989 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb1af1cb5b66706cd0a0da5a3f6b2c380a771100e61f84ca2c85c28f1878f7f6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bdf52752b6cec17ac3d33b9c94d08302ff9e7ff88ab9c23590134a36b1384af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bdf52752b6cec17ac3d33b9c94d08302ff9e7ff88ab9c23590134a36b1384af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:22Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:22 crc kubenswrapper[4861]: I1003 13:32:22.302793 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:22Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:22 crc kubenswrapper[4861]: I1003 13:32:22.315315 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ceeea9a8c61928b935a6c01f2dda3f9bf0036c2c2792c9338cc580a3296285b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:22Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:22 crc kubenswrapper[4861]: I1003 13:32:22.322649 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:22 crc kubenswrapper[4861]: I1003 13:32:22.322685 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:22 crc kubenswrapper[4861]: I1003 13:32:22.322696 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:22 crc kubenswrapper[4861]: I1003 13:32:22.322713 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:22 crc kubenswrapper[4861]: I1003 13:32:22.322724 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:22Z","lastTransitionTime":"2025-10-03T13:32:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:22 crc kubenswrapper[4861]: I1003 13:32:22.329984 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:22Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:22 crc kubenswrapper[4861]: I1003 13:32:22.340820 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://876e806fe7d7313a700bd557fe86fe469146eeb63ecd75684c558d04f6ef5862\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:22Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:22 crc kubenswrapper[4861]: I1003 13:32:22.359995 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66ed4999-426b-4615-bfb3-764a3ecc950f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a6dd78f8d0e99d19d90df2672bd0a66e48195ab147e3821b110c5b9b13fff935\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d7403a686a403bd13b8c7040a8d54e47ea882e532dbde51ff960cf2b4a7dc84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dadd381cc9fb8f216611723c7f3113272fdd37e424ab087ae2b516b1282c724\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c11d2168c2a8a146f93a9048c50a0a7da936f36039a924e2e0c946f571ac6d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://17d9e0e142062e93034c5f825e1229664112d38443d5843713cac6e077737c48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a49ed8048fd561e10cc87dfa9b39d3ff2123f2cc65f9b4402bba6bf01d161213\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://efb31a65c06544254430413ae43161716fc307b8
a95a6f42b0e5a085136f832a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://efb31a65c06544254430413ae43161716fc307b8a95a6f42b0e5a085136f832a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T13:32:20Z\\\",\\\"message\\\":\\\"ork=default : 7.45µs\\\\nI1003 13:32:20.899156 6445 services_controller.go:356] Processing sync for service openshift-cluster-machine-approver/machine-approver for network=default\\\\nI1003 13:32:20.899162 6445 services_controller.go:360] Finished syncing service machine-approver on namespace openshift-cluster-machine-approver for network=default : 5.081µs\\\\nI1003 13:32:20.899168 6445 services_controller.go:356] Processing sync for service openshift-network-operator/metrics for network=default\\\\nI1003 13:32:20.899173 6445 services_controller.go:360] Finished syncing service metrics on namespace openshift-network-operator for network=default : 4.6µs\\\\nI1003 13:32:20.899147 6445 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-route-controller-manager/route-controller-manager_TCP_cluster\\\\\\\", UUID:\\\\\\\"18746a4d-8a63-458a-b7e3-8fb89ff95fc0\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-route-controller-manager/route-controller-manager\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T13:32:19Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-5twn4_openshift-ovn-kubernetes(66ed4999-426b-4615-bfb3-764a3ecc950f)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2ee45e3f91ddde95e7bdf26aed6afb1d69eb3dbbdad136c66a51a2a3a325984\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85920cc6705b475966ceb17c3776353ec6cd31730aab339b4e5469034c49264e\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://85920cc6705b475966ceb17c3776353ec6cd31730aab339b4e5469034c49264e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5twn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:22Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:22 crc kubenswrapper[4861]: I1003 13:32:22.371964 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"68b52998-d285-4825-b267-5023797f02b7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://32b22723ce52857bc415cc89ef0ed6c9ce091b425bc8bfba113badbda15a9c3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://023e4a3ec0b7e1c5743940abaf3884d6209e3b2aea16acb7d4224c54cbeecb5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c
97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://307fc4c2cff3f8a61720a6a33977b69811de252aa80d359754a27c2018137618\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95ae54721bb2577ab8bbcf2a0ac4893bfc1895e53ae1de248975f605729c030c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://95ae54721bb2577ab8bbcf2a0ac4893bfc1895e53ae1de248975f605729c030c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:27Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:26Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:22Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:22 crc kubenswrapper[4861]: I1003 13:32:22.383819 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f330284d8d5446236d2bf739c6df75969c865e304c5adab6b1ec40a92baf30d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63b55320840755d0a9c8296d19550ae1d7f5cb2f17d286dddc10a0202963bd98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:22Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:22 crc kubenswrapper[4861]: I1003 13:32:22.395718 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jwgvx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f714b7db-082f-4c2c-8239-ba5df6986c13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://163f5cadc8f9cf8082434639e5dd0dfae5cefc359dbf462b616e4dde476a309f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b76qk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jwgvx\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:22Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:22 crc kubenswrapper[4861]: I1003 13:32:22.406831 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d8335d3f-417e-4114-b306-a3d8f6c31348\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c79706d97ac0c9214aee8c49206bfb27e579a82781b63cf07bd7b9dc43077402\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7prvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://871a1c47b73846e3f28db33691e75b5ed73af7287e81dae4cf2134fd827614b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7prvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-t9slw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:22Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:22 crc kubenswrapper[4861]: I1003 13:32:22.417732 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hw4vl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9671621a-5831-4fc4-8508-08b284d1cf88\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5cb2b6048f454a138caea8eca4a26c6e5cd219c9d124f46cfa69c168150b6ee4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:32:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sv2d7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53e87e91f53c3b754f21de857a8712b51063c580785de43d0c0e89f47185c549\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sv2d7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\
"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:32:01Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hw4vl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:22Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:22 crc kubenswrapper[4861]: I1003 13:32:22.424994 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:22 crc kubenswrapper[4861]: I1003 13:32:22.425046 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:22 crc kubenswrapper[4861]: I1003 13:32:22.425057 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:22 crc kubenswrapper[4861]: I1003 13:32:22.425081 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:22 crc kubenswrapper[4861]: I1003 13:32:22.425097 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:22Z","lastTransitionTime":"2025-10-03T13:32:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:22 crc kubenswrapper[4861]: I1003 13:32:22.429260 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:22Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:22 crc kubenswrapper[4861]: I1003 13:32:22.527716 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:22 crc kubenswrapper[4861]: I1003 13:32:22.528301 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:22 crc kubenswrapper[4861]: I1003 13:32:22.528317 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:22 crc kubenswrapper[4861]: I1003 13:32:22.528334 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:22 crc kubenswrapper[4861]: I1003 13:32:22.528345 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:22Z","lastTransitionTime":"2025-10-03T13:32:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:22 crc kubenswrapper[4861]: I1003 13:32:22.635322 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:22 crc kubenswrapper[4861]: I1003 13:32:22.635360 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:22 crc kubenswrapper[4861]: I1003 13:32:22.635371 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:22 crc kubenswrapper[4861]: I1003 13:32:22.635386 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:22 crc kubenswrapper[4861]: I1003 13:32:22.635397 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:22Z","lastTransitionTime":"2025-10-03T13:32:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:22 crc kubenswrapper[4861]: I1003 13:32:22.680929 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-cft42" Oct 03 13:32:22 crc kubenswrapper[4861]: E1003 13:32:22.681072 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-cft42" podUID="267128bb-f8b8-4d69-99a3-ba3af795218c" Oct 03 13:32:22 crc kubenswrapper[4861]: I1003 13:32:22.738497 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:22 crc kubenswrapper[4861]: I1003 13:32:22.738581 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:22 crc kubenswrapper[4861]: I1003 13:32:22.738603 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:22 crc kubenswrapper[4861]: I1003 13:32:22.738631 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:22 crc kubenswrapper[4861]: I1003 13:32:22.738652 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:22Z","lastTransitionTime":"2025-10-03T13:32:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:22 crc kubenswrapper[4861]: I1003 13:32:22.840844 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:22 crc kubenswrapper[4861]: I1003 13:32:22.840879 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:22 crc kubenswrapper[4861]: I1003 13:32:22.840894 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:22 crc kubenswrapper[4861]: I1003 13:32:22.840909 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:22 crc kubenswrapper[4861]: I1003 13:32:22.840919 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:22Z","lastTransitionTime":"2025-10-03T13:32:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:22 crc kubenswrapper[4861]: I1003 13:32:22.943097 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:22 crc kubenswrapper[4861]: I1003 13:32:22.943161 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:22 crc kubenswrapper[4861]: I1003 13:32:22.943175 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:22 crc kubenswrapper[4861]: I1003 13:32:22.943191 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:22 crc kubenswrapper[4861]: I1003 13:32:22.943203 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:22Z","lastTransitionTime":"2025-10-03T13:32:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:23 crc kubenswrapper[4861]: I1003 13:32:23.045959 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:23 crc kubenswrapper[4861]: I1003 13:32:23.046018 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:23 crc kubenswrapper[4861]: I1003 13:32:23.046028 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:23 crc kubenswrapper[4861]: I1003 13:32:23.046059 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:23 crc kubenswrapper[4861]: I1003 13:32:23.046070 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:23Z","lastTransitionTime":"2025-10-03T13:32:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:23 crc kubenswrapper[4861]: I1003 13:32:23.148690 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:23 crc kubenswrapper[4861]: I1003 13:32:23.148738 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:23 crc kubenswrapper[4861]: I1003 13:32:23.148756 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:23 crc kubenswrapper[4861]: I1003 13:32:23.148778 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:23 crc kubenswrapper[4861]: I1003 13:32:23.148795 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:23Z","lastTransitionTime":"2025-10-03T13:32:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:23 crc kubenswrapper[4861]: I1003 13:32:23.250869 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:23 crc kubenswrapper[4861]: I1003 13:32:23.250902 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:23 crc kubenswrapper[4861]: I1003 13:32:23.250911 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:23 crc kubenswrapper[4861]: I1003 13:32:23.250923 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:23 crc kubenswrapper[4861]: I1003 13:32:23.250932 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:23Z","lastTransitionTime":"2025-10-03T13:32:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:23 crc kubenswrapper[4861]: I1003 13:32:23.353656 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:23 crc kubenswrapper[4861]: I1003 13:32:23.353731 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:23 crc kubenswrapper[4861]: I1003 13:32:23.353742 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:23 crc kubenswrapper[4861]: I1003 13:32:23.353758 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:23 crc kubenswrapper[4861]: I1003 13:32:23.353772 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:23Z","lastTransitionTime":"2025-10-03T13:32:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:23 crc kubenswrapper[4861]: I1003 13:32:23.455700 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:23 crc kubenswrapper[4861]: I1003 13:32:23.455740 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:23 crc kubenswrapper[4861]: I1003 13:32:23.455753 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:23 crc kubenswrapper[4861]: I1003 13:32:23.455768 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:23 crc kubenswrapper[4861]: I1003 13:32:23.455780 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:23Z","lastTransitionTime":"2025-10-03T13:32:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:23 crc kubenswrapper[4861]: I1003 13:32:23.557989 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:23 crc kubenswrapper[4861]: I1003 13:32:23.558034 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:23 crc kubenswrapper[4861]: I1003 13:32:23.558045 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:23 crc kubenswrapper[4861]: I1003 13:32:23.558064 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:23 crc kubenswrapper[4861]: I1003 13:32:23.558078 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:23Z","lastTransitionTime":"2025-10-03T13:32:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:23 crc kubenswrapper[4861]: I1003 13:32:23.659903 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:23 crc kubenswrapper[4861]: I1003 13:32:23.659938 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:23 crc kubenswrapper[4861]: I1003 13:32:23.659947 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:23 crc kubenswrapper[4861]: I1003 13:32:23.659962 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:23 crc kubenswrapper[4861]: I1003 13:32:23.659971 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:23Z","lastTransitionTime":"2025-10-03T13:32:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:23 crc kubenswrapper[4861]: I1003 13:32:23.680425 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 13:32:23 crc kubenswrapper[4861]: I1003 13:32:23.680504 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 13:32:23 crc kubenswrapper[4861]: E1003 13:32:23.680545 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 13:32:23 crc kubenswrapper[4861]: I1003 13:32:23.680574 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 13:32:23 crc kubenswrapper[4861]: E1003 13:32:23.680630 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 13:32:23 crc kubenswrapper[4861]: E1003 13:32:23.680692 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 13:32:23 crc kubenswrapper[4861]: I1003 13:32:23.762564 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:23 crc kubenswrapper[4861]: I1003 13:32:23.762607 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:23 crc kubenswrapper[4861]: I1003 13:32:23.762615 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:23 crc kubenswrapper[4861]: I1003 13:32:23.762629 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:23 crc kubenswrapper[4861]: I1003 13:32:23.762638 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:23Z","lastTransitionTime":"2025-10-03T13:32:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:23 crc kubenswrapper[4861]: I1003 13:32:23.864882 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:23 crc kubenswrapper[4861]: I1003 13:32:23.864929 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:23 crc kubenswrapper[4861]: I1003 13:32:23.864943 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:23 crc kubenswrapper[4861]: I1003 13:32:23.864959 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:23 crc kubenswrapper[4861]: I1003 13:32:23.864969 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:23Z","lastTransitionTime":"2025-10-03T13:32:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:23 crc kubenswrapper[4861]: I1003 13:32:23.967437 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:23 crc kubenswrapper[4861]: I1003 13:32:23.967478 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:23 crc kubenswrapper[4861]: I1003 13:32:23.967490 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:23 crc kubenswrapper[4861]: I1003 13:32:23.967508 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:23 crc kubenswrapper[4861]: I1003 13:32:23.967522 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:23Z","lastTransitionTime":"2025-10-03T13:32:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:24 crc kubenswrapper[4861]: I1003 13:32:24.069263 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:24 crc kubenswrapper[4861]: I1003 13:32:24.069294 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:24 crc kubenswrapper[4861]: I1003 13:32:24.069301 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:24 crc kubenswrapper[4861]: I1003 13:32:24.069313 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:24 crc kubenswrapper[4861]: I1003 13:32:24.069321 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:24Z","lastTransitionTime":"2025-10-03T13:32:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:24 crc kubenswrapper[4861]: I1003 13:32:24.171614 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:24 crc kubenswrapper[4861]: I1003 13:32:24.171658 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:24 crc kubenswrapper[4861]: I1003 13:32:24.171672 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:24 crc kubenswrapper[4861]: I1003 13:32:24.171691 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:24 crc kubenswrapper[4861]: I1003 13:32:24.171709 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:24Z","lastTransitionTime":"2025-10-03T13:32:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:24 crc kubenswrapper[4861]: I1003 13:32:24.274503 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:24 crc kubenswrapper[4861]: I1003 13:32:24.274556 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:24 crc kubenswrapper[4861]: I1003 13:32:24.274567 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:24 crc kubenswrapper[4861]: I1003 13:32:24.274582 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:24 crc kubenswrapper[4861]: I1003 13:32:24.274592 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:24Z","lastTransitionTime":"2025-10-03T13:32:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:24 crc kubenswrapper[4861]: I1003 13:32:24.377102 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:24 crc kubenswrapper[4861]: I1003 13:32:24.377165 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:24 crc kubenswrapper[4861]: I1003 13:32:24.377180 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:24 crc kubenswrapper[4861]: I1003 13:32:24.377196 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:24 crc kubenswrapper[4861]: I1003 13:32:24.377240 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:24Z","lastTransitionTime":"2025-10-03T13:32:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:24 crc kubenswrapper[4861]: I1003 13:32:24.480752 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:24 crc kubenswrapper[4861]: I1003 13:32:24.480801 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:24 crc kubenswrapper[4861]: I1003 13:32:24.480812 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:24 crc kubenswrapper[4861]: I1003 13:32:24.480827 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:24 crc kubenswrapper[4861]: I1003 13:32:24.480837 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:24Z","lastTransitionTime":"2025-10-03T13:32:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:24 crc kubenswrapper[4861]: I1003 13:32:24.583933 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:24 crc kubenswrapper[4861]: I1003 13:32:24.583979 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:24 crc kubenswrapper[4861]: I1003 13:32:24.583991 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:24 crc kubenswrapper[4861]: I1003 13:32:24.584008 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:24 crc kubenswrapper[4861]: I1003 13:32:24.584020 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:24Z","lastTransitionTime":"2025-10-03T13:32:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:24 crc kubenswrapper[4861]: I1003 13:32:24.680609 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cft42" Oct 03 13:32:24 crc kubenswrapper[4861]: E1003 13:32:24.680767 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-cft42" podUID="267128bb-f8b8-4d69-99a3-ba3af795218c" Oct 03 13:32:24 crc kubenswrapper[4861]: I1003 13:32:24.685648 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:24 crc kubenswrapper[4861]: I1003 13:32:24.685688 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:24 crc kubenswrapper[4861]: I1003 13:32:24.685699 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:24 crc kubenswrapper[4861]: I1003 13:32:24.685714 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:24 crc kubenswrapper[4861]: I1003 13:32:24.685726 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:24Z","lastTransitionTime":"2025-10-03T13:32:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:24 crc kubenswrapper[4861]: I1003 13:32:24.787812 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:24 crc kubenswrapper[4861]: I1003 13:32:24.787845 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:24 crc kubenswrapper[4861]: I1003 13:32:24.787853 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:24 crc kubenswrapper[4861]: I1003 13:32:24.787865 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:24 crc kubenswrapper[4861]: I1003 13:32:24.787877 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:24Z","lastTransitionTime":"2025-10-03T13:32:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:24 crc kubenswrapper[4861]: I1003 13:32:24.789061 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:24 crc kubenswrapper[4861]: I1003 13:32:24.789123 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:24 crc kubenswrapper[4861]: I1003 13:32:24.789135 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:24 crc kubenswrapper[4861]: I1003 13:32:24.789151 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:24 crc kubenswrapper[4861]: I1003 13:32:24.789163 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:24Z","lastTransitionTime":"2025-10-03T13:32:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:24 crc kubenswrapper[4861]: E1003 13:32:24.804767 4861 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:32:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:32:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:24Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:32:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:32:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:24Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9c733c76-1447-4e10-91a4-f1aaa7de6132\\\",\\\"systemUUID\\\":\\\"5c5136c5-33d2-4bef-9fd7-5251914e4451\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:24Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:24 crc kubenswrapper[4861]: I1003 13:32:24.808649 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:24 crc kubenswrapper[4861]: I1003 13:32:24.808686 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 03 13:32:24 crc kubenswrapper[4861]: I1003 13:32:24.808696 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:24 crc kubenswrapper[4861]: I1003 13:32:24.808709 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:24 crc kubenswrapper[4861]: I1003 13:32:24.808718 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:24Z","lastTransitionTime":"2025-10-03T13:32:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:24 crc kubenswrapper[4861]: E1003 13:32:24.823307 4861 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:32:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:32:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:24Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:32:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:32:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:24Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9c733c76-1447-4e10-91a4-f1aaa7de6132\\\",\\\"systemUUID\\\":\\\"5c5136c5-33d2-4bef-9fd7-5251914e4451\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:24Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:24 crc kubenswrapper[4861]: I1003 13:32:24.826713 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:24 crc kubenswrapper[4861]: I1003 13:32:24.826843 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 03 13:32:24 crc kubenswrapper[4861]: I1003 13:32:24.826950 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:24 crc kubenswrapper[4861]: I1003 13:32:24.827068 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:24 crc kubenswrapper[4861]: I1003 13:32:24.827181 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:24Z","lastTransitionTime":"2025-10-03T13:32:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:24 crc kubenswrapper[4861]: E1003 13:32:24.839968 4861 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:32:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:32:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:24Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:32:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:32:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:24Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9c733c76-1447-4e10-91a4-f1aaa7de6132\\\",\\\"systemUUID\\\":\\\"5c5136c5-33d2-4bef-9fd7-5251914e4451\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:24Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:24 crc kubenswrapper[4861]: I1003 13:32:24.844383 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:24 crc kubenswrapper[4861]: I1003 13:32:24.844425 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
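Every "Node became not ready" record above carries the same root cause: kubelet reports NetworkPluginNotReady because no CNI configuration file exists in /etc/kubernetes/cni/net.d/. The following is a minimal standalone Go sketch of that check, not kubelet's actual code path; the directory comes from the log message and the file extensions are an assumption based on common CNI conventions.

package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func main() {
	// Directory named in the repeated kubelet error message above.
	confDir := "/etc/kubernetes/cni/net.d"
	var found []string
	// Assumed extensions: CNI configs are conventionally .conf, .conflist, or .json.
	for _, pattern := range []string{"*.conf", "*.conflist", "*.json"} {
		matches, err := filepath.Glob(filepath.Join(confDir, pattern))
		if err != nil {
			fmt.Fprintln(os.Stderr, "glob:", err)
			os.Exit(1)
		}
		found = append(found, matches...)
	}
	if len(found) == 0 {
		fmt.Println("no CNI configuration file found; network plugin not ready")
		return
	}
	fmt.Println("CNI configuration present:", found)
}

If this directory is empty on the node, the network provider has likely not (or not yet) written its config, which is consistent with the webhook certificate failure shown in the surrounding records.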
event="NodeHasNoDiskPressure" Oct 03 13:32:24 crc kubenswrapper[4861]: I1003 13:32:24.844435 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:24 crc kubenswrapper[4861]: I1003 13:32:24.844448 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:24 crc kubenswrapper[4861]: I1003 13:32:24.844459 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:24Z","lastTransitionTime":"2025-10-03T13:32:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:24 crc kubenswrapper[4861]: E1003 13:32:24.857717 4861 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:32:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:32:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:24Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:32:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:32:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:24Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9c733c76-1447-4e10-91a4-f1aaa7de6132\\\",\\\"systemUUID\\\":\\\"5c5136c5-33d2-4bef-9fd7-5251914e4451\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:24Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:24 crc kubenswrapper[4861]: I1003 13:32:24.861409 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:24 crc kubenswrapper[4861]: I1003 13:32:24.861455 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
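The node-status patch attempts all fail at the node.network-node-identity.openshift.io webhook: the certificate served on https://127.0.0.1:9743 expired on 2025-08-24T17:21:41Z, well before the log's current time of 2025-10-03T13:32:24Z. A small Go diagnostic sketch to confirm this by inspecting the served certificate's validity window (the endpoint address is taken from the log; InsecureSkipVerify is an assumption, acceptable only for local inspection of an already-expired certificate):

package main

import (
	"crypto/tls"
	"fmt"
	"log"
	"time"
)

func main() {
	// Endpoint from the kubelet error: Post "https://127.0.0.1:9743/node?timeout=10s"
	conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{
		InsecureSkipVerify: true, // skip chain verification so an expired cert can still be inspected
	})
	if err != nil {
		log.Fatalf("dial webhook endpoint: %v", err)
	}
	defer conn.Close()

	certs := conn.ConnectionState().PeerCertificates
	if len(certs) == 0 {
		log.Fatal("no peer certificates presented")
	}
	leaf := certs[0]
	fmt.Printf("subject:   %s\n", leaf.Subject)
	fmt.Printf("notBefore: %s\n", leaf.NotBefore.Format(time.RFC3339))
	fmt.Printf("notAfter:  %s\n", leaf.NotAfter.Format(time.RFC3339))
	if time.Now().After(leaf.NotAfter) {
		fmt.Println("certificate expired: matches the kubelet x509 verification failure")
	}
}

Until this certificate is rotated, every status patch is rejected and the kubelet's retry budget is exhausted, as the next record shows.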
event="NodeHasNoDiskPressure" Oct 03 13:32:24 crc kubenswrapper[4861]: I1003 13:32:24.861468 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:24 crc kubenswrapper[4861]: I1003 13:32:24.861484 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:24 crc kubenswrapper[4861]: I1003 13:32:24.861496 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:24Z","lastTransitionTime":"2025-10-03T13:32:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:24 crc kubenswrapper[4861]: E1003 13:32:24.874695 4861 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:32:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:32:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:24Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:32:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:32:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:24Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9c733c76-1447-4e10-91a4-f1aaa7de6132\\\",\\\"systemUUID\\\":\\\"5c5136c5-33d2-4bef-9fd7-5251914e4451\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:24Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:24 crc kubenswrapper[4861]: E1003 13:32:24.874833 4861 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 03 13:32:24 crc kubenswrapper[4861]: I1003 13:32:24.889985 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 03 13:32:24 crc kubenswrapper[4861]: I1003 13:32:24.890297 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:24 crc kubenswrapper[4861]: I1003 13:32:24.890369 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:24 crc kubenswrapper[4861]: I1003 13:32:24.890438 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:24 crc kubenswrapper[4861]: I1003 13:32:24.890506 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:24Z","lastTransitionTime":"2025-10-03T13:32:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:24 crc kubenswrapper[4861]: I1003 13:32:24.992255 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:24 crc kubenswrapper[4861]: I1003 13:32:24.992326 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:24 crc kubenswrapper[4861]: I1003 13:32:24.992338 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:24 crc kubenswrapper[4861]: I1003 13:32:24.992350 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:24 crc kubenswrapper[4861]: I1003 13:32:24.992359 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:24Z","lastTransitionTime":"2025-10-03T13:32:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:25 crc kubenswrapper[4861]: I1003 13:32:25.094700 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:25 crc kubenswrapper[4861]: I1003 13:32:25.094743 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:25 crc kubenswrapper[4861]: I1003 13:32:25.094753 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:25 crc kubenswrapper[4861]: I1003 13:32:25.094774 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:25 crc kubenswrapper[4861]: I1003 13:32:25.094787 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:25Z","lastTransitionTime":"2025-10-03T13:32:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:25 crc kubenswrapper[4861]: I1003 13:32:25.197303 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:25 crc kubenswrapper[4861]: I1003 13:32:25.197340 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:25 crc kubenswrapper[4861]: I1003 13:32:25.197351 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:25 crc kubenswrapper[4861]: I1003 13:32:25.197369 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:25 crc kubenswrapper[4861]: I1003 13:32:25.197382 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:25Z","lastTransitionTime":"2025-10-03T13:32:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:25 crc kubenswrapper[4861]: I1003 13:32:25.299790 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:25 crc kubenswrapper[4861]: I1003 13:32:25.299826 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:25 crc kubenswrapper[4861]: I1003 13:32:25.299842 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:25 crc kubenswrapper[4861]: I1003 13:32:25.299856 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:25 crc kubenswrapper[4861]: I1003 13:32:25.299866 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:25Z","lastTransitionTime":"2025-10-03T13:32:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:25 crc kubenswrapper[4861]: I1003 13:32:25.403417 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:25 crc kubenswrapper[4861]: I1003 13:32:25.403494 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:25 crc kubenswrapper[4861]: I1003 13:32:25.403519 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:25 crc kubenswrapper[4861]: I1003 13:32:25.403554 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:25 crc kubenswrapper[4861]: I1003 13:32:25.403575 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:25Z","lastTransitionTime":"2025-10-03T13:32:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:25 crc kubenswrapper[4861]: I1003 13:32:25.508026 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:25 crc kubenswrapper[4861]: I1003 13:32:25.508084 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:25 crc kubenswrapper[4861]: I1003 13:32:25.508105 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:25 crc kubenswrapper[4861]: I1003 13:32:25.508126 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:25 crc kubenswrapper[4861]: I1003 13:32:25.508140 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:25Z","lastTransitionTime":"2025-10-03T13:32:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:25 crc kubenswrapper[4861]: I1003 13:32:25.610520 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:25 crc kubenswrapper[4861]: I1003 13:32:25.610564 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:25 crc kubenswrapper[4861]: I1003 13:32:25.610576 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:25 crc kubenswrapper[4861]: I1003 13:32:25.610592 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:25 crc kubenswrapper[4861]: I1003 13:32:25.610603 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:25Z","lastTransitionTime":"2025-10-03T13:32:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:25 crc kubenswrapper[4861]: I1003 13:32:25.680600 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 13:32:25 crc kubenswrapper[4861]: I1003 13:32:25.680608 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 13:32:25 crc kubenswrapper[4861]: E1003 13:32:25.680825 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 13:32:25 crc kubenswrapper[4861]: E1003 13:32:25.680742 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 13:32:25 crc kubenswrapper[4861]: I1003 13:32:25.680621 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 13:32:25 crc kubenswrapper[4861]: E1003 13:32:25.680900 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 13:32:25 crc kubenswrapper[4861]: I1003 13:32:25.715421 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:25 crc kubenswrapper[4861]: I1003 13:32:25.715456 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:25 crc kubenswrapper[4861]: I1003 13:32:25.715464 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:25 crc kubenswrapper[4861]: I1003 13:32:25.715478 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:25 crc kubenswrapper[4861]: I1003 13:32:25.715486 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:25Z","lastTransitionTime":"2025-10-03T13:32:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:25 crc kubenswrapper[4861]: I1003 13:32:25.817259 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:25 crc kubenswrapper[4861]: I1003 13:32:25.817287 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:25 crc kubenswrapper[4861]: I1003 13:32:25.817294 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:25 crc kubenswrapper[4861]: I1003 13:32:25.817307 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:25 crc kubenswrapper[4861]: I1003 13:32:25.817315 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:25Z","lastTransitionTime":"2025-10-03T13:32:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:25 crc kubenswrapper[4861]: I1003 13:32:25.919195 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:25 crc kubenswrapper[4861]: I1003 13:32:25.919256 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:25 crc kubenswrapper[4861]: I1003 13:32:25.919271 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:25 crc kubenswrapper[4861]: I1003 13:32:25.919284 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:25 crc kubenswrapper[4861]: I1003 13:32:25.919292 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:25Z","lastTransitionTime":"2025-10-03T13:32:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:26 crc kubenswrapper[4861]: I1003 13:32:26.021658 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:26 crc kubenswrapper[4861]: I1003 13:32:26.021686 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:26 crc kubenswrapper[4861]: I1003 13:32:26.021696 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:26 crc kubenswrapper[4861]: I1003 13:32:26.021710 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:26 crc kubenswrapper[4861]: I1003 13:32:26.021721 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:26Z","lastTransitionTime":"2025-10-03T13:32:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:26 crc kubenswrapper[4861]: I1003 13:32:26.123091 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:26 crc kubenswrapper[4861]: I1003 13:32:26.123125 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:26 crc kubenswrapper[4861]: I1003 13:32:26.123135 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:26 crc kubenswrapper[4861]: I1003 13:32:26.123147 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:26 crc kubenswrapper[4861]: I1003 13:32:26.123156 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:26Z","lastTransitionTime":"2025-10-03T13:32:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:26 crc kubenswrapper[4861]: I1003 13:32:26.225637 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:26 crc kubenswrapper[4861]: I1003 13:32:26.225678 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:26 crc kubenswrapper[4861]: I1003 13:32:26.225687 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:26 crc kubenswrapper[4861]: I1003 13:32:26.225703 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:26 crc kubenswrapper[4861]: I1003 13:32:26.225714 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:26Z","lastTransitionTime":"2025-10-03T13:32:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:26 crc kubenswrapper[4861]: I1003 13:32:26.327658 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:26 crc kubenswrapper[4861]: I1003 13:32:26.327708 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:26 crc kubenswrapper[4861]: I1003 13:32:26.327721 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:26 crc kubenswrapper[4861]: I1003 13:32:26.327737 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:26 crc kubenswrapper[4861]: I1003 13:32:26.327750 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:26Z","lastTransitionTime":"2025-10-03T13:32:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:26 crc kubenswrapper[4861]: I1003 13:32:26.430604 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:26 crc kubenswrapper[4861]: I1003 13:32:26.430682 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:26 crc kubenswrapper[4861]: I1003 13:32:26.430695 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:26 crc kubenswrapper[4861]: I1003 13:32:26.430713 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:26 crc kubenswrapper[4861]: I1003 13:32:26.430723 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:26Z","lastTransitionTime":"2025-10-03T13:32:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:26 crc kubenswrapper[4861]: I1003 13:32:26.532371 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:26 crc kubenswrapper[4861]: I1003 13:32:26.532411 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:26 crc kubenswrapper[4861]: I1003 13:32:26.532421 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:26 crc kubenswrapper[4861]: I1003 13:32:26.532434 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:26 crc kubenswrapper[4861]: I1003 13:32:26.532444 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:26Z","lastTransitionTime":"2025-10-03T13:32:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:26 crc kubenswrapper[4861]: I1003 13:32:26.634731 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:26 crc kubenswrapper[4861]: I1003 13:32:26.634767 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:26 crc kubenswrapper[4861]: I1003 13:32:26.634775 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:26 crc kubenswrapper[4861]: I1003 13:32:26.634789 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:26 crc kubenswrapper[4861]: I1003 13:32:26.634799 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:26Z","lastTransitionTime":"2025-10-03T13:32:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:26 crc kubenswrapper[4861]: I1003 13:32:26.680966 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cft42" Oct 03 13:32:26 crc kubenswrapper[4861]: E1003 13:32:26.681369 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-cft42" podUID="267128bb-f8b8-4d69-99a3-ba3af795218c" Oct 03 13:32:26 crc kubenswrapper[4861]: I1003 13:32:26.697513 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wm76s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3bf3157b-44d1-4bb3-b185-71523a80c054\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8fea8b7da57798521c13d0f35905e5311cd0d8016aa20c37cc0d73c8d6fbc1a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b41a8d9dd0f504f505f1d49f5b7fa39d025f0bc4c49b19ef61691a41bd162b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b41a8d9dd0f504f505f1d49f5b7fa39d025f0bc4c49b19ef61691a41bd162b0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"reason\\\":\\\"Completed\\\"
,\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad87b94fe313e12f4f857a69a0a01f6f34877ded4103f2de3015b588522d9904\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad87b94fe313e12f4f857a69a0a01f6f34877ded4103f2de3015b588522d9904\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4343077b8251a65be24e9269fa38bd48d628581ef65b43f7fa262286df7b677d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4343077b8251a65be24e9269fa38bd48d628581ef65b43f7fa262286df7b677d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d10915a62560e9a9e386d4029ccd1c6cd9c99209ab32653437c32a6e5cf7e98d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"i
mageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d10915a62560e9a9e386d4029ccd1c6cd9c99209ab32653437c32a6e5cf7e98d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cd99ac90e9cf617307233b5899d9da44b563bd5a0969e0a64c4073ee0122b63\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cd99ac90e9cf617307233b5899d9da44b563bd5a0969e0a64c4073ee0122b63\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b20088a595b5a59d1a0339827c7dd169c479a530aed875ac3eeb021b78269490\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b20088a595b5a59d1a0339827c7dd169c479a530aed875ac3eeb021b78269490\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\"
:\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wm76s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:26Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:26 crc kubenswrapper[4861]: I1003 13:32:26.708757 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-c97s6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e1217d91-8c47-4353-b363-96c9de2cdb56\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70a4ac69623752a83655a58cf44ef00fbf88b0321bc83721fbbe16ea746699c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6zdw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-c97s6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:26Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:26 crc kubenswrapper[4861]: I1003 13:32:26.721831 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"587ecce6-1ef4-4f74-a2ba-bd6e9fdb84dc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d77b51532e1ed4922634cbfc9360ac49276104c2c3ca115ea522ff423cd7bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://853fb69edcd3e4a27929ab2a6081c40f93553967619663805afb7b626f9c1e39\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75079f3e07d277ab11585e34fc72877ba93a8d0aeaa3f0c8bb214c7c14f9c1b1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfcce0420b92a42aaae0ae0e6aa26b655cd97f6ce5d45b671bbf394217027023\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cd18d89b8b81faa6f8232c33ecaec19aafc3d9574a090cbef37bcf26f83a4bf9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 13:31:47.746138 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 13:31:47.746280 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 13:31:47.747035 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2119377140/tls.crt::/tmp/serving-cert-2119377140/tls.key\\\\\\\"\\\\nI1003 13:31:48.538827 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 13:31:48.544908 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 13:31:48.544935 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 13:31:48.545220 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 13:31:48.545275 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 13:31:48.555911 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1003 13:31:48.555947 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 13:31:48.555953 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 13:31:48.555961 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 13:31:48.555964 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 13:31:48.555968 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 13:31:48.555971 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1003 13:31:48.556259 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1003 13:31:48.559989 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb1af1cb5b66706cd0a0da5a3f6b2c380a771100e61f84ca2c85c28f1878f7f6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bdf52752b6cec17ac3d33b9c94d08302ff9e7ff88ab9c23590134a36b1384af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bdf52752b6cec17ac3d33b9c94d08302ff9e7ff88ab9c23590134a36b1384af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:26Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:26 crc kubenswrapper[4861]: I1003 13:32:26.735394 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6d0e9cdc-883e-4b67-afb2-2ef5f4b3246d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01eb05b088e421c220145fd833922351aeba4a520944c6b707039785e26ef303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb9bcc84067a58db80e3c7e1b23825baeaff91f97351e9ada3765b6589fda35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80131782150ecbeb45ec2f55e86909b3735ec4f0b09e27e31f6dfc24d6d4ccd7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a40838be9fe69f9bebecff82c9f10b4c00e167b7f927682e6b18ff490bd10ad4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:26Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:26 crc kubenswrapper[4861]: I1003 13:32:26.737932 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:26 crc kubenswrapper[4861]: I1003 13:32:26.737984 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:26 crc kubenswrapper[4861]: I1003 13:32:26.737997 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:26 crc kubenswrapper[4861]: I1003 13:32:26.738017 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:26 crc kubenswrapper[4861]: I1003 13:32:26.738029 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:26Z","lastTransitionTime":"2025-10-03T13:32:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:26 crc kubenswrapper[4861]: I1003 13:32:26.748646 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-n974h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"25670d98-45f4-4308-9576-f6f532c422ec\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d9b2d8fb10bb6dc17ca3b4826e1e4b7e8e562e8c8745605cd332268197166b04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2cs7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:56Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-n974h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:26Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:26 crc kubenswrapper[4861]: I1003 13:32:26.759246 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-cft42" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"267128bb-f8b8-4d69-99a3-ba3af795218c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shz7z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shz7z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:32:02Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-cft42\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:26Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:26 crc kubenswrapper[4861]: I1003 13:32:26.769797 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"68b52998-d285-4825-b267-5023797f02b7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://32b22723ce52857bc415cc89ef0ed6c9ce091b425bc8bfba113badbda15a9c3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://023e4a3ec0b7e1c5743940abaf3884d6209e3b2aea16acb7d4224c54cbeecb5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://307fc4c2cff3f8a61720a6a33977b69811de252aa80d359754a27c2018137618\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95ae54721bb2577ab8bbcf2a0ac4893bfc1895e53ae1de248975f605729c030c\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://95ae54721bb2577ab8bbcf2a0ac4893bfc1895e53ae1de248975f605729c030c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:27Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:26Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:26Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:26 crc kubenswrapper[4861]: I1003 13:32:26.786744 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:26Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:26 crc kubenswrapper[4861]: I1003 13:32:26.798410 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ceeea9a8c61928b935a6c01f2dda3f9bf0036c2c2792c9338cc580a3296285b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:26Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:26 crc kubenswrapper[4861]: I1003 13:32:26.809989 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:26Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:26 crc kubenswrapper[4861]: I1003 13:32:26.826065 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://876e806fe7d7313a700bd557fe86fe469146eeb63ecd75684c558d04f6ef5862\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:26Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:26 crc kubenswrapper[4861]: I1003 13:32:26.842368 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:26 crc kubenswrapper[4861]: I1003 13:32:26.842407 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:26 crc kubenswrapper[4861]: I1003 13:32:26.842416 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:26 crc kubenswrapper[4861]: I1003 13:32:26.842431 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:26 crc kubenswrapper[4861]: I1003 13:32:26.842447 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:26Z","lastTransitionTime":"2025-10-03T13:32:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:26 crc kubenswrapper[4861]: I1003 13:32:26.844510 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66ed4999-426b-4615-bfb3-764a3ecc950f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a6dd78f8d0e99d19d90df2672bd0a66e48195ab147e3821b110c5b9b13fff935\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d7403a686a403bd13b8c7040a8d54e47ea882e532dbde51ff960cf2b4a7dc84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://3dadd381cc9fb8f216611723c7f3113272fdd37e424ab087ae2b516b1282c724\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c11d2168c2a8a146f93a9048c50a0a7da936f36039a924e2e0c946f571ac6d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://17d9e0e142062e93034c5f825e1229664112d38443d5843713cac6e077737c48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a49ed8048fd561e10cc87dfa9b39d3ff2123f2cc65f9b4402bba6bf01d161213\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://efb31a65c06544254430413ae43161716fc307b8a95a6f42b0e5a085136f832a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://efb31a65c06544254430413ae43161716fc307b8a95a6f42b0e5a085136f832a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T13:32:20Z\\\",\\\"message\\\":\\\"ork=default : 7.45µs\\\\nI1003 13:32:20.899156 6445 services_controller.go:356] Processing sync for service openshift-cluster-machine-approver/machine-approver for network=default\\\\nI1003 13:32:20.899162 6445 services_controller.go:360] Finished syncing service machine-approver on namespace openshift-cluster-machine-approver for network=default : 5.081µs\\\\nI1003 13:32:20.899168 6445 services_controller.go:356] Processing sync for service openshift-network-operator/metrics for network=default\\\\nI1003 13:32:20.899173 6445 services_controller.go:360] Finished syncing service metrics on namespace openshift-network-operator for network=default : 4.6µs\\\\nI1003 13:32:20.899147 6445 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-route-controller-manager/route-controller-manager_TCP_cluster\\\\\\\", UUID:\\\\\\\"18746a4d-8a63-458a-b7e3-8fb89ff95fc0\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-route-controller-manager/route-controller-manager\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T13:32:19Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-5twn4_openshift-ovn-kubernetes(66ed4999-426b-4615-bfb3-764a3ecc950f)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2ee45e3f91ddde95e7bdf26aed6afb1d69eb3dbbdad136c66a51a2a3a325984\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85920cc6705b475966ceb17c3776353ec6cd31730aab339b4e5469034c49264e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://85920cc6705b475966ceb17c3776353ec6cd31730aab339b4e5469034c49264e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5twn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:26Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:26 crc kubenswrapper[4861]: I1003 13:32:26.856658 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:26Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:26 crc kubenswrapper[4861]: I1003 13:32:26.867603 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f330284d8d5446236d2bf739c6df75969c865e304c5adab6b1ec40a92baf30d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63b55320840755d0a9c8296d19550ae1d7f5cb2f17d286dddc10a0202963bd98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:26Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:26 crc kubenswrapper[4861]: I1003 13:32:26.880998 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jwgvx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f714b7db-082f-4c2c-8239-ba5df6986c13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://163f5cadc8f9cf8082434639e5dd0dfae5cefc359dbf462b616e4dde476a309f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc
/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b76qk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jwgvx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:26Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:26 crc kubenswrapper[4861]: I1003 13:32:26.893541 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d8335d3f-417e-4114-b306-a3d8f6c31348\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c79706d97ac0c9214aee8c49206bfb27e579a82781b63cf07bd7b9dc43077402\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7prvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://871a1c47b73846e3f28db33691e75b5ed73af7287e81dae4cf2134fd827614b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-de
v/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7prvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-t9slw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:26Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:26 crc kubenswrapper[4861]: I1003 13:32:26.904935 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hw4vl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9671621a-5831-4fc4-8508-08b284d1cf88\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5cb2b6048f454a138caea8eca4a26c6e5cd219c9d124f46cfa69c168150b6ee4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:32:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sv2d7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53e87e91f53c3b754f21de857a8712b51063c580785de43d0c0e89f47185c549\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha
256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sv2d7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:32:01Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hw4vl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:26Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:26 crc kubenswrapper[4861]: I1003 13:32:26.944484 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:26 crc kubenswrapper[4861]: I1003 13:32:26.944528 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:26 crc kubenswrapper[4861]: I1003 13:32:26.944540 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:26 crc kubenswrapper[4861]: I1003 13:32:26.944554 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:26 crc kubenswrapper[4861]: I1003 13:32:26.944563 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:26Z","lastTransitionTime":"2025-10-03T13:32:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:27 crc kubenswrapper[4861]: I1003 13:32:27.047407 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:27 crc kubenswrapper[4861]: I1003 13:32:27.047494 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:27 crc kubenswrapper[4861]: I1003 13:32:27.047509 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:27 crc kubenswrapper[4861]: I1003 13:32:27.047527 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:27 crc kubenswrapper[4861]: I1003 13:32:27.047540 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:27Z","lastTransitionTime":"2025-10-03T13:32:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:27 crc kubenswrapper[4861]: I1003 13:32:27.149990 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:27 crc kubenswrapper[4861]: I1003 13:32:27.150028 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:27 crc kubenswrapper[4861]: I1003 13:32:27.150037 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:27 crc kubenswrapper[4861]: I1003 13:32:27.150052 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:27 crc kubenswrapper[4861]: I1003 13:32:27.150064 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:27Z","lastTransitionTime":"2025-10-03T13:32:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:27 crc kubenswrapper[4861]: I1003 13:32:27.252547 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:27 crc kubenswrapper[4861]: I1003 13:32:27.252587 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:27 crc kubenswrapper[4861]: I1003 13:32:27.252598 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:27 crc kubenswrapper[4861]: I1003 13:32:27.252615 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:27 crc kubenswrapper[4861]: I1003 13:32:27.252626 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:27Z","lastTransitionTime":"2025-10-03T13:32:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:27 crc kubenswrapper[4861]: I1003 13:32:27.355345 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:27 crc kubenswrapper[4861]: I1003 13:32:27.355429 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:27 crc kubenswrapper[4861]: I1003 13:32:27.355447 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:27 crc kubenswrapper[4861]: I1003 13:32:27.355475 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:27 crc kubenswrapper[4861]: I1003 13:32:27.355492 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:27Z","lastTransitionTime":"2025-10-03T13:32:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:27 crc kubenswrapper[4861]: I1003 13:32:27.458090 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:27 crc kubenswrapper[4861]: I1003 13:32:27.458152 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:27 crc kubenswrapper[4861]: I1003 13:32:27.458161 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:27 crc kubenswrapper[4861]: I1003 13:32:27.458175 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:27 crc kubenswrapper[4861]: I1003 13:32:27.458185 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:27Z","lastTransitionTime":"2025-10-03T13:32:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:27 crc kubenswrapper[4861]: I1003 13:32:27.560653 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:27 crc kubenswrapper[4861]: I1003 13:32:27.560684 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:27 crc kubenswrapper[4861]: I1003 13:32:27.560692 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:27 crc kubenswrapper[4861]: I1003 13:32:27.560704 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:27 crc kubenswrapper[4861]: I1003 13:32:27.560712 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:27Z","lastTransitionTime":"2025-10-03T13:32:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:27 crc kubenswrapper[4861]: I1003 13:32:27.664193 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:27 crc kubenswrapper[4861]: I1003 13:32:27.664258 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:27 crc kubenswrapper[4861]: I1003 13:32:27.664270 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:27 crc kubenswrapper[4861]: I1003 13:32:27.664284 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:27 crc kubenswrapper[4861]: I1003 13:32:27.664293 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:27Z","lastTransitionTime":"2025-10-03T13:32:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:27 crc kubenswrapper[4861]: I1003 13:32:27.680801 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 13:32:27 crc kubenswrapper[4861]: I1003 13:32:27.680860 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 13:32:27 crc kubenswrapper[4861]: E1003 13:32:27.680960 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 13:32:27 crc kubenswrapper[4861]: I1003 13:32:27.680976 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 13:32:27 crc kubenswrapper[4861]: E1003 13:32:27.681081 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 13:32:27 crc kubenswrapper[4861]: E1003 13:32:27.681168 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 13:32:27 crc kubenswrapper[4861]: I1003 13:32:27.766526 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:27 crc kubenswrapper[4861]: I1003 13:32:27.766585 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:27 crc kubenswrapper[4861]: I1003 13:32:27.766596 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:27 crc kubenswrapper[4861]: I1003 13:32:27.766611 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:27 crc kubenswrapper[4861]: I1003 13:32:27.766623 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:27Z","lastTransitionTime":"2025-10-03T13:32:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:27 crc kubenswrapper[4861]: I1003 13:32:27.868887 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:27 crc kubenswrapper[4861]: I1003 13:32:27.868936 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:27 crc kubenswrapper[4861]: I1003 13:32:27.868948 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:27 crc kubenswrapper[4861]: I1003 13:32:27.868965 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:27 crc kubenswrapper[4861]: I1003 13:32:27.868977 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:27Z","lastTransitionTime":"2025-10-03T13:32:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:27 crc kubenswrapper[4861]: I1003 13:32:27.971303 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:27 crc kubenswrapper[4861]: I1003 13:32:27.971385 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:27 crc kubenswrapper[4861]: I1003 13:32:27.971408 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:27 crc kubenswrapper[4861]: I1003 13:32:27.971439 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:27 crc kubenswrapper[4861]: I1003 13:32:27.971463 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:27Z","lastTransitionTime":"2025-10-03T13:32:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:28 crc kubenswrapper[4861]: I1003 13:32:28.073840 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:28 crc kubenswrapper[4861]: I1003 13:32:28.073872 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:28 crc kubenswrapper[4861]: I1003 13:32:28.073880 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:28 crc kubenswrapper[4861]: I1003 13:32:28.073909 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:28 crc kubenswrapper[4861]: I1003 13:32:28.073924 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:28Z","lastTransitionTime":"2025-10-03T13:32:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:28 crc kubenswrapper[4861]: I1003 13:32:28.176974 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:28 crc kubenswrapper[4861]: I1003 13:32:28.177032 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:28 crc kubenswrapper[4861]: I1003 13:32:28.177042 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:28 crc kubenswrapper[4861]: I1003 13:32:28.177058 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:28 crc kubenswrapper[4861]: I1003 13:32:28.177068 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:28Z","lastTransitionTime":"2025-10-03T13:32:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:28 crc kubenswrapper[4861]: I1003 13:32:28.279589 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:28 crc kubenswrapper[4861]: I1003 13:32:28.279631 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:28 crc kubenswrapper[4861]: I1003 13:32:28.279642 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:28 crc kubenswrapper[4861]: I1003 13:32:28.279661 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:28 crc kubenswrapper[4861]: I1003 13:32:28.279672 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:28Z","lastTransitionTime":"2025-10-03T13:32:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:28 crc kubenswrapper[4861]: I1003 13:32:28.381607 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:28 crc kubenswrapper[4861]: I1003 13:32:28.381653 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:28 crc kubenswrapper[4861]: I1003 13:32:28.381666 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:28 crc kubenswrapper[4861]: I1003 13:32:28.381685 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:28 crc kubenswrapper[4861]: I1003 13:32:28.381699 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:28Z","lastTransitionTime":"2025-10-03T13:32:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:28 crc kubenswrapper[4861]: I1003 13:32:28.483444 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:28 crc kubenswrapper[4861]: I1003 13:32:28.483483 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:28 crc kubenswrapper[4861]: I1003 13:32:28.483494 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:28 crc kubenswrapper[4861]: I1003 13:32:28.483506 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:28 crc kubenswrapper[4861]: I1003 13:32:28.483514 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:28Z","lastTransitionTime":"2025-10-03T13:32:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:28 crc kubenswrapper[4861]: I1003 13:32:28.586154 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:28 crc kubenswrapper[4861]: I1003 13:32:28.586208 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:28 crc kubenswrapper[4861]: I1003 13:32:28.586294 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:28 crc kubenswrapper[4861]: I1003 13:32:28.586322 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:28 crc kubenswrapper[4861]: I1003 13:32:28.586339 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:28Z","lastTransitionTime":"2025-10-03T13:32:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:28 crc kubenswrapper[4861]: I1003 13:32:28.680589 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cft42" Oct 03 13:32:28 crc kubenswrapper[4861]: E1003 13:32:28.680833 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-cft42" podUID="267128bb-f8b8-4d69-99a3-ba3af795218c" Oct 03 13:32:28 crc kubenswrapper[4861]: I1003 13:32:28.687932 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:28 crc kubenswrapper[4861]: I1003 13:32:28.687973 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:28 crc kubenswrapper[4861]: I1003 13:32:28.687986 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:28 crc kubenswrapper[4861]: I1003 13:32:28.688003 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:28 crc kubenswrapper[4861]: I1003 13:32:28.688014 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:28Z","lastTransitionTime":"2025-10-03T13:32:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:28 crc kubenswrapper[4861]: I1003 13:32:28.790714 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:28 crc kubenswrapper[4861]: I1003 13:32:28.790754 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:28 crc kubenswrapper[4861]: I1003 13:32:28.790765 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:28 crc kubenswrapper[4861]: I1003 13:32:28.790780 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:28 crc kubenswrapper[4861]: I1003 13:32:28.790791 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:28Z","lastTransitionTime":"2025-10-03T13:32:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:28 crc kubenswrapper[4861]: I1003 13:32:28.893864 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:28 crc kubenswrapper[4861]: I1003 13:32:28.893914 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:28 crc kubenswrapper[4861]: I1003 13:32:28.893925 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:28 crc kubenswrapper[4861]: I1003 13:32:28.893944 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:28 crc kubenswrapper[4861]: I1003 13:32:28.893959 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:28Z","lastTransitionTime":"2025-10-03T13:32:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:28 crc kubenswrapper[4861]: I1003 13:32:28.997512 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:28 crc kubenswrapper[4861]: I1003 13:32:28.997567 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:28 crc kubenswrapper[4861]: I1003 13:32:28.997583 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:28 crc kubenswrapper[4861]: I1003 13:32:28.997603 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:28 crc kubenswrapper[4861]: I1003 13:32:28.997615 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:28Z","lastTransitionTime":"2025-10-03T13:32:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:29 crc kubenswrapper[4861]: I1003 13:32:29.099438 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:29 crc kubenswrapper[4861]: I1003 13:32:29.099691 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:29 crc kubenswrapper[4861]: I1003 13:32:29.099781 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:29 crc kubenswrapper[4861]: I1003 13:32:29.099868 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:29 crc kubenswrapper[4861]: I1003 13:32:29.099934 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:29Z","lastTransitionTime":"2025-10-03T13:32:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:29 crc kubenswrapper[4861]: I1003 13:32:29.202508 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:29 crc kubenswrapper[4861]: I1003 13:32:29.202751 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:29 crc kubenswrapper[4861]: I1003 13:32:29.202759 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:29 crc kubenswrapper[4861]: I1003 13:32:29.202802 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:29 crc kubenswrapper[4861]: I1003 13:32:29.202810 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:29Z","lastTransitionTime":"2025-10-03T13:32:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:29 crc kubenswrapper[4861]: I1003 13:32:29.304811 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:29 crc kubenswrapper[4861]: I1003 13:32:29.304850 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:29 crc kubenswrapper[4861]: I1003 13:32:29.304870 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:29 crc kubenswrapper[4861]: I1003 13:32:29.304887 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:29 crc kubenswrapper[4861]: I1003 13:32:29.304899 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:29Z","lastTransitionTime":"2025-10-03T13:32:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:29 crc kubenswrapper[4861]: I1003 13:32:29.406685 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:29 crc kubenswrapper[4861]: I1003 13:32:29.406717 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:29 crc kubenswrapper[4861]: I1003 13:32:29.406725 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:29 crc kubenswrapper[4861]: I1003 13:32:29.406737 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:29 crc kubenswrapper[4861]: I1003 13:32:29.406746 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:29Z","lastTransitionTime":"2025-10-03T13:32:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:29 crc kubenswrapper[4861]: I1003 13:32:29.509773 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:29 crc kubenswrapper[4861]: I1003 13:32:29.510149 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:29 crc kubenswrapper[4861]: I1003 13:32:29.510274 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:29 crc kubenswrapper[4861]: I1003 13:32:29.510393 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:29 crc kubenswrapper[4861]: I1003 13:32:29.510483 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:29Z","lastTransitionTime":"2025-10-03T13:32:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:29 crc kubenswrapper[4861]: I1003 13:32:29.613077 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:29 crc kubenswrapper[4861]: I1003 13:32:29.613119 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:29 crc kubenswrapper[4861]: I1003 13:32:29.613130 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:29 crc kubenswrapper[4861]: I1003 13:32:29.613146 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:29 crc kubenswrapper[4861]: I1003 13:32:29.613156 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:29Z","lastTransitionTime":"2025-10-03T13:32:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:29 crc kubenswrapper[4861]: I1003 13:32:29.680047 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 13:32:29 crc kubenswrapper[4861]: I1003 13:32:29.680078 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 13:32:29 crc kubenswrapper[4861]: E1003 13:32:29.680180 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 13:32:29 crc kubenswrapper[4861]: E1003 13:32:29.680357 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 13:32:29 crc kubenswrapper[4861]: I1003 13:32:29.680079 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 13:32:29 crc kubenswrapper[4861]: E1003 13:32:29.680687 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 13:32:29 crc kubenswrapper[4861]: I1003 13:32:29.714976 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:29 crc kubenswrapper[4861]: I1003 13:32:29.715022 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:29 crc kubenswrapper[4861]: I1003 13:32:29.715034 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:29 crc kubenswrapper[4861]: I1003 13:32:29.715051 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:29 crc kubenswrapper[4861]: I1003 13:32:29.715062 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:29Z","lastTransitionTime":"2025-10-03T13:32:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:29 crc kubenswrapper[4861]: I1003 13:32:29.818347 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:29 crc kubenswrapper[4861]: I1003 13:32:29.818375 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:29 crc kubenswrapper[4861]: I1003 13:32:29.818384 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:29 crc kubenswrapper[4861]: I1003 13:32:29.818397 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:29 crc kubenswrapper[4861]: I1003 13:32:29.818418 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:29Z","lastTransitionTime":"2025-10-03T13:32:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:29 crc kubenswrapper[4861]: I1003 13:32:29.920344 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:29 crc kubenswrapper[4861]: I1003 13:32:29.920384 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:29 crc kubenswrapper[4861]: I1003 13:32:29.920395 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:29 crc kubenswrapper[4861]: I1003 13:32:29.920429 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:29 crc kubenswrapper[4861]: I1003 13:32:29.920439 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:29Z","lastTransitionTime":"2025-10-03T13:32:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:30 crc kubenswrapper[4861]: I1003 13:32:30.022589 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:30 crc kubenswrapper[4861]: I1003 13:32:30.022644 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:30 crc kubenswrapper[4861]: I1003 13:32:30.022655 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:30 crc kubenswrapper[4861]: I1003 13:32:30.022670 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:30 crc kubenswrapper[4861]: I1003 13:32:30.022684 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:30Z","lastTransitionTime":"2025-10-03T13:32:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:30 crc kubenswrapper[4861]: I1003 13:32:30.125112 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:30 crc kubenswrapper[4861]: I1003 13:32:30.125179 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:30 crc kubenswrapper[4861]: I1003 13:32:30.125189 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:30 crc kubenswrapper[4861]: I1003 13:32:30.125204 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:30 crc kubenswrapper[4861]: I1003 13:32:30.125214 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:30Z","lastTransitionTime":"2025-10-03T13:32:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:30 crc kubenswrapper[4861]: I1003 13:32:30.227351 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:30 crc kubenswrapper[4861]: I1003 13:32:30.227409 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:30 crc kubenswrapper[4861]: I1003 13:32:30.227421 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:30 crc kubenswrapper[4861]: I1003 13:32:30.227436 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:30 crc kubenswrapper[4861]: I1003 13:32:30.227447 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:30Z","lastTransitionTime":"2025-10-03T13:32:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:30 crc kubenswrapper[4861]: I1003 13:32:30.329893 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:30 crc kubenswrapper[4861]: I1003 13:32:30.329936 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:30 crc kubenswrapper[4861]: I1003 13:32:30.329947 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:30 crc kubenswrapper[4861]: I1003 13:32:30.329964 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:30 crc kubenswrapper[4861]: I1003 13:32:30.329975 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:30Z","lastTransitionTime":"2025-10-03T13:32:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:30 crc kubenswrapper[4861]: I1003 13:32:30.432129 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:30 crc kubenswrapper[4861]: I1003 13:32:30.432166 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:30 crc kubenswrapper[4861]: I1003 13:32:30.432179 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:30 crc kubenswrapper[4861]: I1003 13:32:30.432195 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:30 crc kubenswrapper[4861]: I1003 13:32:30.432207 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:30Z","lastTransitionTime":"2025-10-03T13:32:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:30 crc kubenswrapper[4861]: I1003 13:32:30.535256 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:30 crc kubenswrapper[4861]: I1003 13:32:30.535317 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:30 crc kubenswrapper[4861]: I1003 13:32:30.535330 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:30 crc kubenswrapper[4861]: I1003 13:32:30.535344 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:30 crc kubenswrapper[4861]: I1003 13:32:30.535356 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:30Z","lastTransitionTime":"2025-10-03T13:32:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:30 crc kubenswrapper[4861]: I1003 13:32:30.637594 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:30 crc kubenswrapper[4861]: I1003 13:32:30.637628 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:30 crc kubenswrapper[4861]: I1003 13:32:30.637638 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:30 crc kubenswrapper[4861]: I1003 13:32:30.637650 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:30 crc kubenswrapper[4861]: I1003 13:32:30.637658 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:30Z","lastTransitionTime":"2025-10-03T13:32:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:30 crc kubenswrapper[4861]: I1003 13:32:30.682609 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cft42" Oct 03 13:32:30 crc kubenswrapper[4861]: E1003 13:32:30.682714 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-cft42" podUID="267128bb-f8b8-4d69-99a3-ba3af795218c" Oct 03 13:32:30 crc kubenswrapper[4861]: I1003 13:32:30.739290 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:30 crc kubenswrapper[4861]: I1003 13:32:30.739316 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:30 crc kubenswrapper[4861]: I1003 13:32:30.739324 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:30 crc kubenswrapper[4861]: I1003 13:32:30.739336 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:30 crc kubenswrapper[4861]: I1003 13:32:30.739345 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:30Z","lastTransitionTime":"2025-10-03T13:32:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:30 crc kubenswrapper[4861]: I1003 13:32:30.841740 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:30 crc kubenswrapper[4861]: I1003 13:32:30.841762 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:30 crc kubenswrapper[4861]: I1003 13:32:30.841772 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:30 crc kubenswrapper[4861]: I1003 13:32:30.841786 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:30 crc kubenswrapper[4861]: I1003 13:32:30.841796 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:30Z","lastTransitionTime":"2025-10-03T13:32:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:30 crc kubenswrapper[4861]: I1003 13:32:30.943542 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:30 crc kubenswrapper[4861]: I1003 13:32:30.943566 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:30 crc kubenswrapper[4861]: I1003 13:32:30.943575 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:30 crc kubenswrapper[4861]: I1003 13:32:30.943587 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:30 crc kubenswrapper[4861]: I1003 13:32:30.943595 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:30Z","lastTransitionTime":"2025-10-03T13:32:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:31 crc kubenswrapper[4861]: I1003 13:32:31.045442 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:31 crc kubenswrapper[4861]: I1003 13:32:31.045493 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:31 crc kubenswrapper[4861]: I1003 13:32:31.045505 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:31 crc kubenswrapper[4861]: I1003 13:32:31.045538 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:31 crc kubenswrapper[4861]: I1003 13:32:31.045550 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:31Z","lastTransitionTime":"2025-10-03T13:32:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:31 crc kubenswrapper[4861]: I1003 13:32:31.147918 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:31 crc kubenswrapper[4861]: I1003 13:32:31.147959 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:31 crc kubenswrapper[4861]: I1003 13:32:31.147969 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:31 crc kubenswrapper[4861]: I1003 13:32:31.147984 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:31 crc kubenswrapper[4861]: I1003 13:32:31.147996 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:31Z","lastTransitionTime":"2025-10-03T13:32:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:31 crc kubenswrapper[4861]: I1003 13:32:31.250512 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:31 crc kubenswrapper[4861]: I1003 13:32:31.250560 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:31 crc kubenswrapper[4861]: I1003 13:32:31.250584 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:31 crc kubenswrapper[4861]: I1003 13:32:31.250605 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:31 crc kubenswrapper[4861]: I1003 13:32:31.250620 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:31Z","lastTransitionTime":"2025-10-03T13:32:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:31 crc kubenswrapper[4861]: I1003 13:32:31.352599 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:31 crc kubenswrapper[4861]: I1003 13:32:31.352635 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:31 crc kubenswrapper[4861]: I1003 13:32:31.352643 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:31 crc kubenswrapper[4861]: I1003 13:32:31.352656 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:31 crc kubenswrapper[4861]: I1003 13:32:31.352668 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:31Z","lastTransitionTime":"2025-10-03T13:32:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:31 crc kubenswrapper[4861]: I1003 13:32:31.454508 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:31 crc kubenswrapper[4861]: I1003 13:32:31.454547 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:31 crc kubenswrapper[4861]: I1003 13:32:31.454556 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:31 crc kubenswrapper[4861]: I1003 13:32:31.454570 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:31 crc kubenswrapper[4861]: I1003 13:32:31.454579 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:31Z","lastTransitionTime":"2025-10-03T13:32:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:31 crc kubenswrapper[4861]: I1003 13:32:31.557269 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:31 crc kubenswrapper[4861]: I1003 13:32:31.557303 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:31 crc kubenswrapper[4861]: I1003 13:32:31.557313 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:31 crc kubenswrapper[4861]: I1003 13:32:31.557325 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:31 crc kubenswrapper[4861]: I1003 13:32:31.557333 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:31Z","lastTransitionTime":"2025-10-03T13:32:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:31 crc kubenswrapper[4861]: I1003 13:32:31.659860 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:31 crc kubenswrapper[4861]: I1003 13:32:31.659900 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:31 crc kubenswrapper[4861]: I1003 13:32:31.659912 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:31 crc kubenswrapper[4861]: I1003 13:32:31.659928 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:31 crc kubenswrapper[4861]: I1003 13:32:31.659942 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:31Z","lastTransitionTime":"2025-10-03T13:32:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:31 crc kubenswrapper[4861]: I1003 13:32:31.680678 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 13:32:31 crc kubenswrapper[4861]: I1003 13:32:31.680710 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 13:32:31 crc kubenswrapper[4861]: I1003 13:32:31.680767 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 13:32:31 crc kubenswrapper[4861]: E1003 13:32:31.680793 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 13:32:31 crc kubenswrapper[4861]: E1003 13:32:31.680917 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 13:32:31 crc kubenswrapper[4861]: E1003 13:32:31.680978 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 13:32:31 crc kubenswrapper[4861]: I1003 13:32:31.762461 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:31 crc kubenswrapper[4861]: I1003 13:32:31.762513 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:31 crc kubenswrapper[4861]: I1003 13:32:31.762526 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:31 crc kubenswrapper[4861]: I1003 13:32:31.762542 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:31 crc kubenswrapper[4861]: I1003 13:32:31.762554 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:31Z","lastTransitionTime":"2025-10-03T13:32:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:31 crc kubenswrapper[4861]: I1003 13:32:31.864402 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:31 crc kubenswrapper[4861]: I1003 13:32:31.864431 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:31 crc kubenswrapper[4861]: I1003 13:32:31.864440 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:31 crc kubenswrapper[4861]: I1003 13:32:31.864452 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:31 crc kubenswrapper[4861]: I1003 13:32:31.864460 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:31Z","lastTransitionTime":"2025-10-03T13:32:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
[condensed: node-status block repeats unchanged at ~100 ms intervals from 13:32:31.966 through 13:32:32.581]
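The setters.go records carry the full Ready condition as inline JSON. A small illustrative Go program, using a local struct that mirrors the logged field names rather than the kubelet's own types, that produces the same condition shape:

package main

import (
	"encoding/json"
	"fmt"
	"time"
)

// NodeCondition mirrors the condition={...} JSON in the records above.
type NodeCondition struct {
	Type               string    `json:"type"`
	Status             string    `json:"status"`
	LastHeartbeatTime  time.Time `json:"lastHeartbeatTime"`
	LastTransitionTime time.Time `json:"lastTransitionTime"`
	Reason             string    `json:"reason"`
	Message            string    `json:"message"`
}

func main() {
	now := time.Now().UTC().Truncate(time.Second)
	ready := NodeCondition{
		Type:               "Ready",
		Status:             "False", // kubelet reports NotReady while the runtime network is down
		LastHeartbeatTime:  now,
		LastTransitionTime: now,
		Reason:             "KubeletNotReady",
		Message:            "container runtime network not ready: NetworkReady=false",
	}
	b, _ := json.Marshal(ready)
	fmt.Println(string(b)) // same shape as the condition={...} field in the log
}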
Oct 03 13:32:32 crc kubenswrapper[4861]: I1003 13:32:32.680489 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cft42"
Oct 03 13:32:32 crc kubenswrapper[4861]: E1003 13:32:32.680948 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-cft42" podUID="267128bb-f8b8-4d69-99a3-ba3af795218c"
[condensed: node-status block repeats unchanged at ~100 ms intervals from 13:32:32.683 through 13:32:33.607]
Oct 03 13:32:33 crc kubenswrapper[4861]: I1003 13:32:33.680763 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 03 13:32:33 crc kubenswrapper[4861]: I1003 13:32:33.680869 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 03 13:32:33 crc kubenswrapper[4861]: E1003 13:32:33.680981 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 03 13:32:33 crc kubenswrapper[4861]: I1003 13:32:33.681137 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 03 13:32:33 crc kubenswrapper[4861]: E1003 13:32:33.681329 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 03 13:32:33 crc kubenswrapper[4861]: E1003 13:32:33.681508 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
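Every record here follows the klog header layout: severity letter, MMDD date, wall-clock time, PID, source file:line, then the structured message. A hypothetical helper, assuming only that layout, for splitting and grouping the repeated records above:

package main

import (
	"fmt"
	"regexp"
)

// klogHeader matches headers like "I1003 13:32:31.557269 4861 kubelet_node_status.go:724] ...".
var klogHeader = regexp.MustCompile(`^([IWEF])(\d{4}) (\d{2}:\d{2}:\d{2}\.\d+)\s+(\d+)\s+([^\]]+)\] (.*)$`)

func main() {
	line := `I1003 13:32:31.557269 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"`
	m := klogHeader.FindStringSubmatch(line)
	if m == nil {
		fmt.Println("not a klog line")
		return
	}
	fmt.Printf("severity=%s date=%s time=%s pid=%s source=%s\n", m[1], m[2], m[3], m[4], m[5])
	fmt.Println("message:", m[6])
}

Grouping on the source field (kubelet_node_status.go:724, setters.go:603, pod_workers.go:1301) is enough to separate the heartbeat spam from the actionable errors in this section.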
[condensed: node-status block repeats unchanged at ~100 ms intervals from 13:32:33.714 through 13:32:34.538]
Oct 03 13:32:34 crc kubenswrapper[4861]: I1003 13:32:34.611627 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/267128bb-f8b8-4d69-99a3-ba3af795218c-metrics-certs\") pod \"network-metrics-daemon-cft42\" (UID: \"267128bb-f8b8-4d69-99a3-ba3af795218c\") " pod="openshift-multus/network-metrics-daemon-cft42"
Oct 03 13:32:34 crc kubenswrapper[4861]: E1003 13:32:34.611782 4861 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Oct 03 13:32:34 crc kubenswrapper[4861]: E1003 13:32:34.611841 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/267128bb-f8b8-4d69-99a3-ba3af795218c-metrics-certs podName:267128bb-f8b8-4d69-99a3-ba3af795218c nodeName:}" failed. No retries permitted until 2025-10-03 13:33:06.611826498 +0000 UTC m=+100.609811545 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/267128bb-f8b8-4d69-99a3-ba3af795218c-metrics-certs") pod "network-metrics-daemon-cft42" (UID: "267128bb-f8b8-4d69-99a3-ba3af795218c") : object "openshift-multus"/"metrics-daemon-secret" not registered
[condensed: node-status block repeats unchanged at 13:32:34.640]
Oct 03 13:32:34 crc kubenswrapper[4861]: I1003 13:32:34.680828 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cft42"
Oct 03 13:32:34 crc kubenswrapper[4861]: E1003 13:32:34.680969 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-cft42" podUID="267128bb-f8b8-4d69-99a3-ba3af795218c"
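The "durationBeforeRetry 32s" in the nestedpendingoperations record comes from exponential backoff on the repeatedly failing mount. A sketch with guessed constants (500 ms initial duration, 2 m cap; the kubelet's exact values may differ) that lands on 32 s at the sixth consecutive failure:

package main

import (
	"fmt"
	"time"
)

// backoff doubles the retry delay on each failure, capped at max.
func backoff(initial, max time.Duration, failures int) time.Duration {
	d := initial
	for i := 0; i < failures; i++ {
		d *= 2
		if d > max {
			return max
		}
	}
	return d
}

func main() {
	// 1s, 2s, 4s, 8s, 16s, 32s, 1m4s, then capped:
	// consistent with "No retries permitted until ... (durationBeforeRetry 32s)".
	for f := 1; f <= 8; f++ {
		fmt.Printf("failure %d -> retry in %v\n", f, backoff(500*time.Millisecond, 2*time.Minute, f))
	}
}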
[condensed: node-status block repeats unchanged at ~100 ms intervals from 13:32:34.742 through 13:32:35.062]
Oct 03 13:32:35 crc kubenswrapper[4861]: E1003 13:32:35.074876 4861 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:32:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:32:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:35Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:32:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:32:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:35Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9c733c76-1447-4e10-91a4-f1aaa7de6132\\\",\\\"systemUUID\\\":\\\"5c5136c5-33d2-4bef-9fd7-5251914e4451\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:35Z is after 2025-08-24T17:21:41Z"
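The status patch fails because the node-identity webhook at 127.0.0.1:9743 serves a certificate that expired on 2025-08-24. A quick Go check, assuming the endpoint from the log is reachable from the node, that prints the served certificate's validity window:

package main

import (
	"crypto/tls"
	"fmt"
	"time"
)

func main() {
	conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{
		InsecureSkipVerify: true, // inspect-only: we want the cert even though it is expired
	})
	if err != nil {
		fmt.Println("dial failed:", err)
		return
	}
	defer conn.Close()

	cert := conn.ConnectionState().PeerCertificates[0]
	fmt.Printf("subject=%s notBefore=%s notAfter=%s\n", cert.Subject, cert.NotBefore, cert.NotAfter)
	if time.Now().After(cert.NotAfter) {
		// Matches the log: current time 2025-10-03T13:32:35Z is after 2025-08-24T17:21:41Z.
		fmt.Println("certificate has expired; the serving cert needs rotation")
	}
}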
event="NodeHasNoDiskPressure" Oct 03 13:32:35 crc kubenswrapper[4861]: I1003 13:32:35.078187 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:35 crc kubenswrapper[4861]: I1003 13:32:35.078201 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:35 crc kubenswrapper[4861]: I1003 13:32:35.078211 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:35Z","lastTransitionTime":"2025-10-03T13:32:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:35 crc kubenswrapper[4861]: E1003 13:32:35.089278 4861 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:32:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:32:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:35Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:32:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:32:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:35Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9c733c76-1447-4e10-91a4-f1aaa7de6132\\\",\\\"systemUUID\\\":\\\"5c5136c5-33d2-4bef-9fd7-5251914e4451\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:35Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:35 crc kubenswrapper[4861]: I1003 13:32:35.092068 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:35 crc kubenswrapper[4861]: I1003 13:32:35.092099 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 03 13:32:35 crc kubenswrapper[4861]: I1003 13:32:35.092109 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:35 crc kubenswrapper[4861]: I1003 13:32:35.092125 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:35 crc kubenswrapper[4861]: I1003 13:32:35.092135 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:35Z","lastTransitionTime":"2025-10-03T13:32:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:35 crc kubenswrapper[4861]: E1003 13:32:35.104070 4861 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:32:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:32:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:35Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:32:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:32:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:35Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9c733c76-1447-4e10-91a4-f1aaa7de6132\\\",\\\"systemUUID\\\":\\\"5c5136c5-33d2-4bef-9fd7-5251914e4451\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:35Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:35 crc kubenswrapper[4861]: I1003 13:32:35.107054 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:35 crc kubenswrapper[4861]: I1003 13:32:35.107089 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 03 13:32:35 crc kubenswrapper[4861]: I1003 13:32:35.107102 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:35 crc kubenswrapper[4861]: I1003 13:32:35.107117 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:35 crc kubenswrapper[4861]: I1003 13:32:35.107129 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:35Z","lastTransitionTime":"2025-10-03T13:32:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:35 crc kubenswrapper[4861]: E1003 13:32:35.117433 4861 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:32:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:32:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:35Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:32:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:32:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:35Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9c733c76-1447-4e10-91a4-f1aaa7de6132\\\",\\\"systemUUID\\\":\\\"5c5136c5-33d2-4bef-9fd7-5251914e4451\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:35Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:35 crc kubenswrapper[4861]: I1003 13:32:35.120437 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:35 crc kubenswrapper[4861]: I1003 13:32:35.120495 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 03 13:32:35 crc kubenswrapper[4861]: I1003 13:32:35.120509 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:35 crc kubenswrapper[4861]: I1003 13:32:35.120523 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:35 crc kubenswrapper[4861]: I1003 13:32:35.120532 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:35Z","lastTransitionTime":"2025-10-03T13:32:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:35 crc kubenswrapper[4861]: E1003 13:32:35.132875 4861 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:32:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:32:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:35Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:32:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:32:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:35Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9c733c76-1447-4e10-91a4-f1aaa7de6132\\\",\\\"systemUUID\\\":\\\"5c5136c5-33d2-4bef-9fd7-5251914e4451\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:35Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:35 crc kubenswrapper[4861]: E1003 13:32:35.133047 4861 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 03 13:32:35 crc kubenswrapper[4861]: I1003 13:32:35.152152 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 03 13:32:35 crc kubenswrapper[4861]: I1003 13:32:35.152185 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:35 crc kubenswrapper[4861]: I1003 13:32:35.152196 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:35 crc kubenswrapper[4861]: I1003 13:32:35.152211 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:35 crc kubenswrapper[4861]: I1003 13:32:35.152222 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:35Z","lastTransitionTime":"2025-10-03T13:32:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:35 crc kubenswrapper[4861]: I1003 13:32:35.253948 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:35 crc kubenswrapper[4861]: I1003 13:32:35.254252 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:35 crc kubenswrapper[4861]: I1003 13:32:35.254352 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:35 crc kubenswrapper[4861]: I1003 13:32:35.254426 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:35 crc kubenswrapper[4861]: I1003 13:32:35.254499 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:35Z","lastTransitionTime":"2025-10-03T13:32:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:35 crc kubenswrapper[4861]: I1003 13:32:35.356489 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:35 crc kubenswrapper[4861]: I1003 13:32:35.356531 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:35 crc kubenswrapper[4861]: I1003 13:32:35.356542 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:35 crc kubenswrapper[4861]: I1003 13:32:35.356560 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:35 crc kubenswrapper[4861]: I1003 13:32:35.356573 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:35Z","lastTransitionTime":"2025-10-03T13:32:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:35 crc kubenswrapper[4861]: I1003 13:32:35.459860 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:35 crc kubenswrapper[4861]: I1003 13:32:35.459898 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:35 crc kubenswrapper[4861]: I1003 13:32:35.459911 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:35 crc kubenswrapper[4861]: I1003 13:32:35.459928 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:35 crc kubenswrapper[4861]: I1003 13:32:35.459939 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:35Z","lastTransitionTime":"2025-10-03T13:32:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:35 crc kubenswrapper[4861]: I1003 13:32:35.562497 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:35 crc kubenswrapper[4861]: I1003 13:32:35.562526 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:35 crc kubenswrapper[4861]: I1003 13:32:35.562535 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:35 crc kubenswrapper[4861]: I1003 13:32:35.562549 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:35 crc kubenswrapper[4861]: I1003 13:32:35.562557 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:35Z","lastTransitionTime":"2025-10-03T13:32:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:35 crc kubenswrapper[4861]: I1003 13:32:35.664670 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:35 crc kubenswrapper[4861]: I1003 13:32:35.664709 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:35 crc kubenswrapper[4861]: I1003 13:32:35.664719 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:35 crc kubenswrapper[4861]: I1003 13:32:35.664734 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:35 crc kubenswrapper[4861]: I1003 13:32:35.664743 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:35Z","lastTransitionTime":"2025-10-03T13:32:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:35 crc kubenswrapper[4861]: I1003 13:32:35.680355 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 13:32:35 crc kubenswrapper[4861]: I1003 13:32:35.680366 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 13:32:35 crc kubenswrapper[4861]: I1003 13:32:35.680383 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 13:32:35 crc kubenswrapper[4861]: E1003 13:32:35.680840 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 13:32:35 crc kubenswrapper[4861]: E1003 13:32:35.681025 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 13:32:35 crc kubenswrapper[4861]: I1003 13:32:35.681056 4861 scope.go:117] "RemoveContainer" containerID="efb31a65c06544254430413ae43161716fc307b8a95a6f42b0e5a085136f832a" Oct 03 13:32:35 crc kubenswrapper[4861]: E1003 13:32:35.681134 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 13:32:35 crc kubenswrapper[4861]: E1003 13:32:35.681290 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-5twn4_openshift-ovn-kubernetes(66ed4999-426b-4615-bfb3-764a3ecc950f)\"" pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" podUID="66ed4999-426b-4615-bfb3-764a3ecc950f" Oct 03 13:32:35 crc kubenswrapper[4861]: I1003 13:32:35.766941 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:35 crc kubenswrapper[4861]: I1003 13:32:35.766980 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:35 crc kubenswrapper[4861]: I1003 13:32:35.766988 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:35 crc kubenswrapper[4861]: I1003 13:32:35.767001 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:35 crc kubenswrapper[4861]: I1003 13:32:35.767010 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:35Z","lastTransitionTime":"2025-10-03T13:32:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:35 crc kubenswrapper[4861]: I1003 13:32:35.870539 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:35 crc kubenswrapper[4861]: I1003 13:32:35.870631 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:35 crc kubenswrapper[4861]: I1003 13:32:35.870647 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:35 crc kubenswrapper[4861]: I1003 13:32:35.870679 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:35 crc kubenswrapper[4861]: I1003 13:32:35.870697 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:35Z","lastTransitionTime":"2025-10-03T13:32:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:35 crc kubenswrapper[4861]: I1003 13:32:35.973447 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:35 crc kubenswrapper[4861]: I1003 13:32:35.973484 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:35 crc kubenswrapper[4861]: I1003 13:32:35.973493 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:35 crc kubenswrapper[4861]: I1003 13:32:35.973509 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:35 crc kubenswrapper[4861]: I1003 13:32:35.973519 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:35Z","lastTransitionTime":"2025-10-03T13:32:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:36 crc kubenswrapper[4861]: I1003 13:32:36.075467 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:36 crc kubenswrapper[4861]: I1003 13:32:36.075506 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:36 crc kubenswrapper[4861]: I1003 13:32:36.075514 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:36 crc kubenswrapper[4861]: I1003 13:32:36.075528 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:36 crc kubenswrapper[4861]: I1003 13:32:36.075538 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:36Z","lastTransitionTime":"2025-10-03T13:32:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:36 crc kubenswrapper[4861]: I1003 13:32:36.177521 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:36 crc kubenswrapper[4861]: I1003 13:32:36.177550 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:36 crc kubenswrapper[4861]: I1003 13:32:36.177559 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:36 crc kubenswrapper[4861]: I1003 13:32:36.177571 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:36 crc kubenswrapper[4861]: I1003 13:32:36.177581 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:36Z","lastTransitionTime":"2025-10-03T13:32:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:36 crc kubenswrapper[4861]: I1003 13:32:36.279843 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:36 crc kubenswrapper[4861]: I1003 13:32:36.279880 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:36 crc kubenswrapper[4861]: I1003 13:32:36.279889 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:36 crc kubenswrapper[4861]: I1003 13:32:36.279902 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:36 crc kubenswrapper[4861]: I1003 13:32:36.279911 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:36Z","lastTransitionTime":"2025-10-03T13:32:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:36 crc kubenswrapper[4861]: I1003 13:32:36.382429 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:36 crc kubenswrapper[4861]: I1003 13:32:36.382475 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:36 crc kubenswrapper[4861]: I1003 13:32:36.382486 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:36 crc kubenswrapper[4861]: I1003 13:32:36.382512 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:36 crc kubenswrapper[4861]: I1003 13:32:36.382525 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:36Z","lastTransitionTime":"2025-10-03T13:32:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:36 crc kubenswrapper[4861]: I1003 13:32:36.484936 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:36 crc kubenswrapper[4861]: I1003 13:32:36.484970 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:36 crc kubenswrapper[4861]: I1003 13:32:36.484980 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:36 crc kubenswrapper[4861]: I1003 13:32:36.484995 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:36 crc kubenswrapper[4861]: I1003 13:32:36.485003 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:36Z","lastTransitionTime":"2025-10-03T13:32:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:36 crc kubenswrapper[4861]: I1003 13:32:36.587087 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:36 crc kubenswrapper[4861]: I1003 13:32:36.587127 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:36 crc kubenswrapper[4861]: I1003 13:32:36.587136 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:36 crc kubenswrapper[4861]: I1003 13:32:36.587149 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:36 crc kubenswrapper[4861]: I1003 13:32:36.587159 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:36Z","lastTransitionTime":"2025-10-03T13:32:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:36 crc kubenswrapper[4861]: I1003 13:32:36.680449 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cft42" Oct 03 13:32:36 crc kubenswrapper[4861]: E1003 13:32:36.680594 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-cft42" podUID="267128bb-f8b8-4d69-99a3-ba3af795218c" Oct 03 13:32:36 crc kubenswrapper[4861]: I1003 13:32:36.689033 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:36 crc kubenswrapper[4861]: I1003 13:32:36.689085 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:36 crc kubenswrapper[4861]: I1003 13:32:36.689096 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:36 crc kubenswrapper[4861]: I1003 13:32:36.689109 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:36 crc kubenswrapper[4861]: I1003 13:32:36.689119 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:36Z","lastTransitionTime":"2025-10-03T13:32:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:36 crc kubenswrapper[4861]: I1003 13:32:36.696388 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"587ecce6-1ef4-4f74-a2ba-bd6e9fdb84dc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d77b51532e1ed4922634cbfc9360ac49276104c2c3ca115ea522ff423cd7bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://853fb69edcd3e4a27929ab2a6081c40f93553967619663805afb7b626f9c1e39\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75079f3e07d277ab11585e34fc72877ba93a8d0aeaa3f0c8bb214c7c14f9c1b1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfcce0420b92a42aaae0ae0e6aa26b655cd97f6ce5d45b671bbf394217027023\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cd18d89b8b81faa6f8232c33ecaec19aafc3d9574a090cbef37bcf26f83a4bf9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 13:31:47.746138 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 13:31:47.746280 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 13:31:47.747035 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2119377140/tls.crt::/tmp/serving-cert-2119377140/tls.key\\\\\\\"\\\\nI1003 13:31:48.538827 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 13:31:48.544908 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 13:31:48.544935 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 13:31:48.545220 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 13:31:48.545275 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 13:31:48.555911 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1003 13:31:48.555947 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 13:31:48.555953 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 13:31:48.555961 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 13:31:48.555964 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 13:31:48.555968 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 13:31:48.555971 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1003 13:31:48.556259 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1003 13:31:48.559989 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb1af1cb5b66706cd0a0da5a3f6b2c380a771100e61f84ca2c85c28f1878f7f6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bdf52752b6cec17ac3d33b9c94d08302ff9e7ff88ab9c23590134a36b1384af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bdf52752b6cec17ac3d33b9c94d08302ff9e7ff88ab9c23590134a36b1384af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:36Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:36 crc kubenswrapper[4861]: I1003 13:32:36.706809 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6d0e9cdc-883e-4b67-afb2-2ef5f4b3246d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01eb05b088e421c220145fd833922351aeba4a520944c6b707039785e26ef303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb9bcc84067a58db80e3c7e1b23825baeaff91f97351e9ada3765b6589fda35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80131782150ecbeb45ec2f55e86909b3735ec4f0b09e27e31f6dfc24d6d4ccd7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a40838be9fe69f9bebecff82c9f10b4c00e167b7f927682e6b18ff490bd10ad4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:36Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:36 crc kubenswrapper[4861]: I1003 13:32:36.716120 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-n974h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"25670d98-45f4-4308-9576-f6f532c422ec\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d9b2d8fb10bb6dc17ca3b4826e1e4b7e8e562e8c8745605cd332268197166b04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2cs7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase
\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:56Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-n974h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:36Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:36 crc kubenswrapper[4861]: I1003 13:32:36.726145 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-cft42" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"267128bb-f8b8-4d69-99a3-ba3af795218c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shz7z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shz7z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:32:02Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-cft42\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:36Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:36 crc kubenswrapper[4861]: I1003 13:32:36.738432 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:36Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:36 crc kubenswrapper[4861]: I1003 13:32:36.749989 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://876e806fe7d7313a700bd557fe86fe469146eeb63ecd75684c558d04f6ef5862\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:36Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:36 crc kubenswrapper[4861]: I1003 13:32:36.768088 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66ed4999-426b-4615-bfb3-764a3ecc950f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a6dd78f8d0e99d19d90df2672bd0a66e48195ab147e3821b110c5b9b13fff935\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d7403a686a403bd13b8c7040a8d54e47ea882e532dbde51ff960cf2b4a7dc84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dadd381cc9fb8f216611723c7f3113272fdd37e424ab087ae2b516b1282c724\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c11d2168c2a8a146f93a9048c50a0a7da936f36039a924e2e0c946f571ac6d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://17d9e0e142062e93034c5f825e1229664112d38443d5843713cac6e077737c48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a49ed8048fd561e10cc87dfa9b39d3ff2123f2cc65f9b4402bba6bf01d161213\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://efb31a65c06544254430413ae43161716fc307b8
a95a6f42b0e5a085136f832a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://efb31a65c06544254430413ae43161716fc307b8a95a6f42b0e5a085136f832a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T13:32:20Z\\\",\\\"message\\\":\\\"ork=default : 7.45µs\\\\nI1003 13:32:20.899156 6445 services_controller.go:356] Processing sync for service openshift-cluster-machine-approver/machine-approver for network=default\\\\nI1003 13:32:20.899162 6445 services_controller.go:360] Finished syncing service machine-approver on namespace openshift-cluster-machine-approver for network=default : 5.081µs\\\\nI1003 13:32:20.899168 6445 services_controller.go:356] Processing sync for service openshift-network-operator/metrics for network=default\\\\nI1003 13:32:20.899173 6445 services_controller.go:360] Finished syncing service metrics on namespace openshift-network-operator for network=default : 4.6µs\\\\nI1003 13:32:20.899147 6445 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-route-controller-manager/route-controller-manager_TCP_cluster\\\\\\\", UUID:\\\\\\\"18746a4d-8a63-458a-b7e3-8fb89ff95fc0\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-route-controller-manager/route-controller-manager\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T13:32:19Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-5twn4_openshift-ovn-kubernetes(66ed4999-426b-4615-bfb3-764a3ecc950f)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2ee45e3f91ddde95e7bdf26aed6afb1d69eb3dbbdad136c66a51a2a3a325984\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85920cc6705b475966ceb17c3776353ec6cd31730aab339b4e5469034c49264e\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://85920cc6705b475966ceb17c3776353ec6cd31730aab339b4e5469034c49264e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5twn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:36Z is after 2025-08-24T17:21:41Z"
Oct 03 13:32:36 crc kubenswrapper[4861]: I1003 13:32:36.780338 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"68b52998-d285-4825-b267-5023797f02b7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://32b22723ce52857bc415cc89ef0ed6c9ce091b425bc8bfba113badbda15a9c3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://023e4a3ec0b7e1c5743940abaf3884d6209e3b2aea16acb7d4224c54cbeecb5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://307fc4c2cff3f8a61720a6a33977b69811de252aa80d359754a27c2018137618\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95ae54721bb2577ab8bbcf2a0ac4893bfc1895e53ae1de248975f605729c030c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://95ae54721bb2577ab8bbcf2a0ac4893bfc1895e53ae1de248975f605729c030c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:27Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:26Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:36Z is after 2025-08-24T17:21:41Z"
Oct 03 13:32:36 crc kubenswrapper[4861]: I1003 13:32:36.792522 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:36 crc kubenswrapper[4861]: I1003 13:32:36.792573 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:36 crc kubenswrapper[4861]: I1003 13:32:36.792586 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:36 crc kubenswrapper[4861]: I1003 13:32:36.792604 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:36 crc kubenswrapper[4861]: I1003 13:32:36.792615 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:36Z","lastTransitionTime":"2025-10-03T13:32:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:36 crc kubenswrapper[4861]: I1003 13:32:36.797930 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:36Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:36 crc kubenswrapper[4861]: I1003 13:32:36.812204 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ceeea9a8c61928b935a6c01f2dda3f9bf0036c2c2792c9338cc580a3296285b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:36Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:36 crc kubenswrapper[4861]: I1003 13:32:36.822897 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hw4vl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9671621a-5831-4fc4-8508-08b284d1cf88\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5cb2b6048f454a138caea8eca4a26c6e5cd219c9d124f46cfa69c168150b6ee4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:32:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sv2d7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53e87e91f53c3b754f21de857a8712b51063c580785de43d0c0e89f47185c549\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sv2d7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:32:01Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hw4vl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:36Z is after 2025-08-24T17:21:41Z" Oct 03 
13:32:36 crc kubenswrapper[4861]: I1003 13:32:36.833411 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:36Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:36 crc kubenswrapper[4861]: I1003 13:32:36.844783 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f330284d8d5446236d2bf739c6df75969c865e304c5adab6b1ec40a92baf30d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63b55320840755d0a9c8296d19550ae1d7f5cb2f17d286dddc10a0202963bd98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:36Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:36 crc kubenswrapper[4861]: I1003 13:32:36.855308 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jwgvx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f714b7db-082f-4c2c-8239-ba5df6986c13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://163f5cadc8f9cf8082434639e5dd0dfae5cefc359dbf462b616e4dde476a309f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b76qk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jwgvx\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:36Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:36 crc kubenswrapper[4861]: I1003 13:32:36.866299 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d8335d3f-417e-4114-b306-a3d8f6c31348\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c79706d97ac0c9214aee8c49206bfb27e579a82781b63cf07bd7b9dc43077402\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7prvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://871a1c47b73846e3f28db33691e75b5ed73af7287e81dae4cf2134fd827614b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7prvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-t9slw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:36Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:36 crc kubenswrapper[4861]: I1003 13:32:36.883369 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wm76s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3bf3157b-44d1-4bb3-b185-71523a80c054\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8fea8b7da57798521c13d0f35905e5311cd0d8016aa20c37cc0d73c8d6fbc1a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b41a8d9dd0f504f505f1d49f5b7fa39d025f0bc4c49b19ef61691a41bd162b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b41a8d9dd0f504f505f1d49f5b7fa39d025f0bc4c49b19ef61691a41bd162b0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\
",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad87b94fe313e12f4f857a69a0a01f6f34877ded4103f2de3015b588522d9904\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad87b94fe313e12f4f857a69a0a01f6f34877ded4103f2de3015b588522d9904\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4343077b8251a65be24e9269fa38bd48d628581ef65b43f7fa262286df7b677d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4343077b8251a65be24e9269fa38bd48d628581ef65b43f7fa262286df7b677d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d10915a62560e9a9e386d4029ccd1c6cd9c99209ab32653437c32a6e5cf7e98d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"
containerID\\\":\\\"cri-o://d10915a62560e9a9e386d4029ccd1c6cd9c99209ab32653437c32a6e5cf7e98d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cd99ac90e9cf617307233b5899d9da44b563bd5a0969e0a64c4073ee0122b63\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cd99ac90e9cf617307233b5899d9da44b563bd5a0969e0a64c4073ee0122b63\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b20088a595b5a59d1a0339827c7dd169c479a530aed875ac3eeb021b78269490\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b20088a595b5a59d1a0339827c7dd169c479a530aed875ac3eeb021b78269490\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wm76s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:36Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:36 crc kubenswrapper[4861]: I1003 13:32:36.894452 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:36 crc kubenswrapper[4861]: I1003 13:32:36.894483 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:36 crc kubenswrapper[4861]: I1003 13:32:36.894492 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:36 crc kubenswrapper[4861]: I1003 13:32:36.894506 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:36 crc kubenswrapper[4861]: I1003 13:32:36.894516 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:36Z","lastTransitionTime":"2025-10-03T13:32:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:36 crc kubenswrapper[4861]: I1003 13:32:36.897973 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-c97s6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e1217d91-8c47-4353-b363-96c9de2cdb56\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70a4ac69623752a83655a58cf44ef00fbf88b0321bc83721fbbe16ea746699c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6zdw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP
\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-c97s6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:36Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:36 crc kubenswrapper[4861]: I1003 13:32:36.996062 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:36 crc kubenswrapper[4861]: I1003 13:32:36.996101 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:36 crc kubenswrapper[4861]: I1003 13:32:36.996109 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:36 crc kubenswrapper[4861]: I1003 13:32:36.996124 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:36 crc kubenswrapper[4861]: I1003 13:32:36.996134 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:36Z","lastTransitionTime":"2025-10-03T13:32:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:37 crc kubenswrapper[4861]: I1003 13:32:37.098068 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:37 crc kubenswrapper[4861]: I1003 13:32:37.098383 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:37 crc kubenswrapper[4861]: I1003 13:32:37.098467 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:37 crc kubenswrapper[4861]: I1003 13:32:37.098558 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:37 crc kubenswrapper[4861]: I1003 13:32:37.098619 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:37Z","lastTransitionTime":"2025-10-03T13:32:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:37 crc kubenswrapper[4861]: I1003 13:32:37.200961 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:37 crc kubenswrapper[4861]: I1003 13:32:37.201252 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:37 crc kubenswrapper[4861]: I1003 13:32:37.201355 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:37 crc kubenswrapper[4861]: I1003 13:32:37.201439 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:37 crc kubenswrapper[4861]: I1003 13:32:37.201522 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:37Z","lastTransitionTime":"2025-10-03T13:32:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:37 crc kubenswrapper[4861]: I1003 13:32:37.303907 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:37 crc kubenswrapper[4861]: I1003 13:32:37.303941 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:37 crc kubenswrapper[4861]: I1003 13:32:37.303950 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:37 crc kubenswrapper[4861]: I1003 13:32:37.303962 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:37 crc kubenswrapper[4861]: I1003 13:32:37.303971 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:37Z","lastTransitionTime":"2025-10-03T13:32:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:37 crc kubenswrapper[4861]: I1003 13:32:37.405545 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:37 crc kubenswrapper[4861]: I1003 13:32:37.405594 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:37 crc kubenswrapper[4861]: I1003 13:32:37.405608 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:37 crc kubenswrapper[4861]: I1003 13:32:37.405624 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:37 crc kubenswrapper[4861]: I1003 13:32:37.405636 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:37Z","lastTransitionTime":"2025-10-03T13:32:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:37 crc kubenswrapper[4861]: I1003 13:32:37.507376 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:37 crc kubenswrapper[4861]: I1003 13:32:37.507408 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:37 crc kubenswrapper[4861]: I1003 13:32:37.507418 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:37 crc kubenswrapper[4861]: I1003 13:32:37.507431 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:37 crc kubenswrapper[4861]: I1003 13:32:37.507442 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:37Z","lastTransitionTime":"2025-10-03T13:32:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:37 crc kubenswrapper[4861]: I1003 13:32:37.609686 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:37 crc kubenswrapper[4861]: I1003 13:32:37.609730 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:37 crc kubenswrapper[4861]: I1003 13:32:37.609743 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:37 crc kubenswrapper[4861]: I1003 13:32:37.609759 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:37 crc kubenswrapper[4861]: I1003 13:32:37.609770 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:37Z","lastTransitionTime":"2025-10-03T13:32:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:37 crc kubenswrapper[4861]: I1003 13:32:37.680883 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 13:32:37 crc kubenswrapper[4861]: I1003 13:32:37.680916 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 13:32:37 crc kubenswrapper[4861]: I1003 13:32:37.681013 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 13:32:37 crc kubenswrapper[4861]: E1003 13:32:37.681141 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 13:32:37 crc kubenswrapper[4861]: E1003 13:32:37.681214 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 13:32:37 crc kubenswrapper[4861]: E1003 13:32:37.681344 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 13:32:37 crc kubenswrapper[4861]: I1003 13:32:37.711571 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:37 crc kubenswrapper[4861]: I1003 13:32:37.711617 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:37 crc kubenswrapper[4861]: I1003 13:32:37.711626 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:37 crc kubenswrapper[4861]: I1003 13:32:37.711639 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:37 crc kubenswrapper[4861]: I1003 13:32:37.711650 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:37Z","lastTransitionTime":"2025-10-03T13:32:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:37 crc kubenswrapper[4861]: I1003 13:32:37.814123 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:37 crc kubenswrapper[4861]: I1003 13:32:37.814151 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:37 crc kubenswrapper[4861]: I1003 13:32:37.814159 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:37 crc kubenswrapper[4861]: I1003 13:32:37.814171 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:37 crc kubenswrapper[4861]: I1003 13:32:37.814180 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:37Z","lastTransitionTime":"2025-10-03T13:32:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:37 crc kubenswrapper[4861]: I1003 13:32:37.916358 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:37 crc kubenswrapper[4861]: I1003 13:32:37.916399 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:37 crc kubenswrapper[4861]: I1003 13:32:37.916410 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:37 crc kubenswrapper[4861]: I1003 13:32:37.916427 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:37 crc kubenswrapper[4861]: I1003 13:32:37.916440 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:37Z","lastTransitionTime":"2025-10-03T13:32:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:38 crc kubenswrapper[4861]: I1003 13:32:38.019439 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:38 crc kubenswrapper[4861]: I1003 13:32:38.019890 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:38 crc kubenswrapper[4861]: I1003 13:32:38.019973 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:38 crc kubenswrapper[4861]: I1003 13:32:38.020060 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:38 crc kubenswrapper[4861]: I1003 13:32:38.020131 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:38Z","lastTransitionTime":"2025-10-03T13:32:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:38 crc kubenswrapper[4861]: I1003 13:32:38.070881 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-jwgvx_f714b7db-082f-4c2c-8239-ba5df6986c13/kube-multus/0.log" Oct 03 13:32:38 crc kubenswrapper[4861]: I1003 13:32:38.070937 4861 generic.go:334] "Generic (PLEG): container finished" podID="f714b7db-082f-4c2c-8239-ba5df6986c13" containerID="163f5cadc8f9cf8082434639e5dd0dfae5cefc359dbf462b616e4dde476a309f" exitCode=1 Oct 03 13:32:38 crc kubenswrapper[4861]: I1003 13:32:38.070987 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-jwgvx" event={"ID":"f714b7db-082f-4c2c-8239-ba5df6986c13","Type":"ContainerDied","Data":"163f5cadc8f9cf8082434639e5dd0dfae5cefc359dbf462b616e4dde476a309f"} Oct 03 13:32:38 crc kubenswrapper[4861]: I1003 13:32:38.071458 4861 scope.go:117] "RemoveContainer" containerID="163f5cadc8f9cf8082434639e5dd0dfae5cefc359dbf462b616e4dde476a309f" Oct 03 13:32:38 crc kubenswrapper[4861]: I1003 13:32:38.083865 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:38Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:38 crc kubenswrapper[4861]: I1003 13:32:38.095720 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f330284d8d5446236d2bf739c6df75969c865e304c5adab6b1ec40a92baf30d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63b55320840755d0a9c8296d19550ae1d7f5cb2f17d286dddc10a0202963bd98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:38Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:38 crc kubenswrapper[4861]: I1003 13:32:38.110690 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jwgvx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f714b7db-082f-4c2c-8239-ba5df6986c13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://163f5cadc8f9cf8082434639e5dd0dfae5cefc359dbf462b616e4dde476a309f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://163f5cadc8f9cf8082434639e5dd0dfae5cefc359dbf462b616e4dde476a309f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T13:32:37Z\\\",\\\"message\\\":\\\"2025-10-03T13:31:52+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_cae56917-e465-4c7f-ba8e-e9cb8cb67572\\\\n2025-10-03T13:31:52+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_cae56917-e465-4c7f-ba8e-e9cb8cb67572 to /host/opt/cni/bin/\\\\n2025-10-03T13:31:52Z [verbose] multus-daemon started\\\\n2025-10-03T13:31:52Z [verbose] Readiness Indicator file check\\\\n2025-10-03T13:32:37Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b76qk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jwgvx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:38Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:38 crc kubenswrapper[4861]: I1003 13:32:38.121538 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d8335d3f-417e-4114-b306-a3d8f6c31348\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c79706d97ac0c9214aee8c49206bfb27e579a82781b63cf07bd7b9dc43077402\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7prvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://871a1c47b73846e3f28db33691e75b5ed73af7287e81dae4cf2134fd827614b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7prvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-t9slw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:38Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:38 crc kubenswrapper[4861]: I1003 13:32:38.122987 4861 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:38 crc kubenswrapper[4861]: I1003 13:32:38.123043 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:38 crc kubenswrapper[4861]: I1003 13:32:38.123052 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:38 crc kubenswrapper[4861]: I1003 13:32:38.123065 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:38 crc kubenswrapper[4861]: I1003 13:32:38.123074 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:38Z","lastTransitionTime":"2025-10-03T13:32:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:38 crc kubenswrapper[4861]: I1003 13:32:38.132248 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hw4vl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9671621a-5831-4fc4-8508-08b284d1cf88\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5cb2b6048f454a138caea8eca4a26c6e5cd219c9d124f46cfa69c168150b6ee4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:32:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sv2d7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53e87e91f53c3b754f21de857a8712b51063c580785de43d0c0e89f47185c549\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha2
56:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sv2d7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:32:01Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hw4vl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:38Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:38 crc kubenswrapper[4861]: I1003 13:32:38.145128 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wm76s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3bf3157b-44d1-4bb3-b185-71523a80c054\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8fea8b7da57798521c13d0f35905e5311cd0d8016aa20c37cc0d73c8d6fbc1a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b41a8d9dd0f504f505f1d49f5b7fa39d025f0bc4c49b19ef61691a41bd162b0\\\",\\\"image\\\":\\\"quay.io/openshift-r
elease-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b41a8d9dd0f504f505f1d49f5b7fa39d025f0bc4c49b19ef61691a41bd162b0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad87b94fe313e12f4f857a69a0a01f6f34877ded4103f2de3015b588522d9904\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad87b94fe313e12f4f857a69a0a01f6f34877ded4103f2de3015b588522d9904\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4343077b8251a65be24e9269fa38bd48d628581ef65b43f7fa262286df7b677d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4343077b8251a65be24e9269fa38bd48d628581ef65b43f7fa262286df7b677d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/b
in\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d10915a62560e9a9e386d4029ccd1c6cd9c99209ab32653437c32a6e5cf7e98d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d10915a62560e9a9e386d4029ccd1c6cd9c99209ab32653437c32a6e5cf7e98d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cd99ac90e9cf617307233b5899d9da44b563bd5a0969e0a64c4073ee0122b63\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cd99ac90e9cf617307233b5899d9da44b563bd5a0969e0a64c4073ee0122b63\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b20088a595b5a59d1a0339827c7dd169c479a530aed875ac3eeb021b78269490\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b20088a595b5
a59d1a0339827c7dd169c479a530aed875ac3eeb021b78269490\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wm76s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:38Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:38 crc kubenswrapper[4861]: I1003 13:32:38.157330 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-c97s6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e1217d91-8c47-4353-b363-96c9de2cdb56\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70a4ac69623752a83655a58cf44ef00fbf88b0321bc83721fbbe16ea746699c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6zdw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-c97s6\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:38Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:38 crc kubenswrapper[4861]: I1003 13:32:38.170025 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"587ecce6-1ef4-4f74-a2ba-bd6e9fdb84dc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d77b51532e1ed4922634cbfc9360ac49276104c2c3ca115ea522ff423cd7bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://853fb69edcd3e4a27929ab2a6081c40f93553967619663805afb7b626f9c1e39\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75079f3e07d277ab11585e34fc72877ba93a8d0aeaa3f0c8bb214c7c14f9c1b1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\
\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfcce0420b92a42aaae0ae0e6aa26b655cd97f6ce5d45b671bbf394217027023\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cd18d89b8b81faa6f8232c33ecaec19aafc3d9574a090cbef37bcf26f83a4bf9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 13:31:47.746138 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 13:31:47.746280 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 13:31:47.747035 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2119377140/tls.crt::/tmp/serving-cert-2119377140/tls.key\\\\\\\"\\\\nI1003 13:31:48.538827 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 13:31:48.544908 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 13:31:48.544935 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 13:31:48.545220 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 13:31:48.545275 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 13:31:48.555911 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1003 13:31:48.555947 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 13:31:48.555953 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 13:31:48.555961 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 13:31:48.555964 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 13:31:48.555968 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 13:31:48.555971 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1003 13:31:48.556259 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1003 13:31:48.559989 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb1af1cb5b66706cd0a0da5a3f6b2c380a771100e61f84ca2c85c28f1878f7f6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bdf52752b6cec17ac3d33b9c94d08302ff9e7ff88ab9c23590134a36b1384af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bdf52752b6cec17ac3d33b9c94d08302ff9e7ff88ab9c23590134a36b1384af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:38Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:38 crc kubenswrapper[4861]: I1003 13:32:38.183533 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6d0e9cdc-883e-4b67-afb2-2ef5f4b3246d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01eb05b088e421c220145fd833922351aeba4a520944c6b707039785e26ef303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb9bcc84067a58db80e3c7e1b23825baeaff91f97351e9ada3765b6589fda35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80131782150ecbeb45ec2f55e86909b3735ec4f0b09e27e31f6dfc24d6d4ccd7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a40838be9fe69f9bebecff82c9f10b4c00e167b7f927682e6b18ff490bd10ad4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:38Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:38 crc kubenswrapper[4861]: I1003 13:32:38.193112 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-n974h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"25670d98-45f4-4308-9576-f6f532c422ec\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d9b2d8fb10bb6dc17ca3b4826e1e4b7e8e562e8c8745605cd332268197166b04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2cs7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase
\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:56Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-n974h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:38Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:38 crc kubenswrapper[4861]: I1003 13:32:38.203769 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-cft42" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"267128bb-f8b8-4d69-99a3-ba3af795218c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shz7z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shz7z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:32:02Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-cft42\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:38Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:38 crc kubenswrapper[4861]: I1003 13:32:38.223946 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66ed4999-426b-4615-bfb3-764a3ecc950f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a6dd78f8d0e99d19d90df2672bd0a66e48195ab147e3821b110c5b9b13fff935\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d7403a686a403bd13b8c7040a8d54e47ea882e532dbde51ff960cf2b4a7dc84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dadd381cc9fb8f216611723c7f3113272fdd37e424ab087ae2b516b1282c724\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c11d2168c2a8a146f93a9048c50a0a7da936f36039a924e2e0c946f571ac6d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://17d9e0e142062e93034c5f825e1229664112d38443d5843713cac6e077737c48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a49ed
8048fd561e10cc87dfa9b39d3ff2123f2cc65f9b4402bba6bf01d161213\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://efb31a65c06544254430413ae43161716fc307b8a95a6f42b0e5a085136f832a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://efb31a65c06544254430413ae43161716fc307b8a95a6f42b0e5a085136f832a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T13:32:20Z\\\",\\\"message\\\":\\\"ork=default : 7.45µs\\\\nI1003 13:32:20.899156 6445 services_controller.go:356] Processing sync for service openshift-cluster-machine-approver/machine-approver for network=default\\\\nI1003 13:32:20.899162 6445 services_controller.go:360] Finished syncing service machine-approver on namespace openshift-cluster-machine-approver for network=default : 5.081µs\\\\nI1003 13:32:20.899168 6445 services_controller.go:356] Processing sync for service openshift-network-operator/metrics for network=default\\\\nI1003 13:32:20.899173 6445 services_controller.go:360] Finished syncing service metrics on namespace openshift-network-operator for network=default : 4.6µs\\\\nI1003 13:32:20.899147 6445 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-route-controller-manager/route-controller-manager_TCP_cluster\\\\\\\", UUID:\\\\\\\"18746a4d-8a63-458a-b7e3-8fb89ff95fc0\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-route-controller-manager/route-controller-manager\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T13:32:19Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-5twn4_openshift-ovn-kubernetes(66ed4999-426b-4615-bfb3-764a3ecc950f)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2ee45e3f91ddde95e7bdf26aed6afb1d69eb3dbbdad136c66a51a2a3a325984\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85920cc6705b475966ceb17c3776353ec6cd31730aab339b4e5469034c49264e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://85920cc6705b475966ceb17c3776353ec6cd31730aab339b4e5469034c49264e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5twn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:38Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:38 crc kubenswrapper[4861]: I1003 13:32:38.224929 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:38 crc kubenswrapper[4861]: I1003 13:32:38.224964 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:38 crc kubenswrapper[4861]: I1003 13:32:38.224975 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:38 crc kubenswrapper[4861]: I1003 13:32:38.224991 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:38 crc kubenswrapper[4861]: I1003 13:32:38.225001 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:38Z","lastTransitionTime":"2025-10-03T13:32:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:38 crc kubenswrapper[4861]: I1003 13:32:38.241483 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"68b52998-d285-4825-b267-5023797f02b7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://32b22723ce52857bc415cc89ef0ed6c9ce091b425bc8bfba113badbda15a9c3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://023e4a3ec0b7e1c5743940abaf3884d6209e3b2aea16acb7d4224c54cbeecb5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://307fc4c2cff3f8a61720a6a33977b69811de252aa80d359754a27c2018137618\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"
cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95ae54721bb2577ab8bbcf2a0ac4893bfc1895e53ae1de248975f605729c030c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://95ae54721bb2577ab8bbcf2a0ac4893bfc1895e53ae1de248975f605729c030c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:27Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:26Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:38Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:38 crc kubenswrapper[4861]: I1003 13:32:38.257932 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:38Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:38 crc kubenswrapper[4861]: I1003 13:32:38.273352 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ceeea9a8c61928b935a6c01f2dda3f9bf0036c2c2792c9338cc580a3296285b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:38Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:38 crc kubenswrapper[4861]: I1003 13:32:38.287047 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:38Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:38 crc kubenswrapper[4861]: I1003 13:32:38.298861 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://876e806fe7d7313a700bd557fe86fe469146eeb63ecd75684c558d04f6ef5862\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:38Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:38 crc kubenswrapper[4861]: I1003 13:32:38.330477 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:38 crc kubenswrapper[4861]: I1003 13:32:38.330501 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:38 crc kubenswrapper[4861]: I1003 13:32:38.330510 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:38 crc kubenswrapper[4861]: I1003 13:32:38.330522 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:38 crc kubenswrapper[4861]: I1003 13:32:38.330530 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:38Z","lastTransitionTime":"2025-10-03T13:32:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:38 crc kubenswrapper[4861]: I1003 13:32:38.432835 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:38 crc kubenswrapper[4861]: I1003 13:32:38.432898 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:38 crc kubenswrapper[4861]: I1003 13:32:38.432910 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:38 crc kubenswrapper[4861]: I1003 13:32:38.432926 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:38 crc kubenswrapper[4861]: I1003 13:32:38.432940 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:38Z","lastTransitionTime":"2025-10-03T13:32:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:38 crc kubenswrapper[4861]: I1003 13:32:38.534969 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:38 crc kubenswrapper[4861]: I1003 13:32:38.535025 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:38 crc kubenswrapper[4861]: I1003 13:32:38.535037 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:38 crc kubenswrapper[4861]: I1003 13:32:38.535053 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:38 crc kubenswrapper[4861]: I1003 13:32:38.535064 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:38Z","lastTransitionTime":"2025-10-03T13:32:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:38 crc kubenswrapper[4861]: I1003 13:32:38.637370 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:38 crc kubenswrapper[4861]: I1003 13:32:38.637406 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:38 crc kubenswrapper[4861]: I1003 13:32:38.637416 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:38 crc kubenswrapper[4861]: I1003 13:32:38.637428 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:38 crc kubenswrapper[4861]: I1003 13:32:38.637437 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:38Z","lastTransitionTime":"2025-10-03T13:32:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:38 crc kubenswrapper[4861]: I1003 13:32:38.681103 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cft42" Oct 03 13:32:38 crc kubenswrapper[4861]: E1003 13:32:38.681296 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-cft42" podUID="267128bb-f8b8-4d69-99a3-ba3af795218c" Oct 03 13:32:38 crc kubenswrapper[4861]: I1003 13:32:38.739752 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:38 crc kubenswrapper[4861]: I1003 13:32:38.739782 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:38 crc kubenswrapper[4861]: I1003 13:32:38.739791 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:38 crc kubenswrapper[4861]: I1003 13:32:38.739803 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:38 crc kubenswrapper[4861]: I1003 13:32:38.739812 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:38Z","lastTransitionTime":"2025-10-03T13:32:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:38 crc kubenswrapper[4861]: I1003 13:32:38.842145 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:38 crc kubenswrapper[4861]: I1003 13:32:38.842182 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:38 crc kubenswrapper[4861]: I1003 13:32:38.842191 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:38 crc kubenswrapper[4861]: I1003 13:32:38.842205 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:38 crc kubenswrapper[4861]: I1003 13:32:38.842217 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:38Z","lastTransitionTime":"2025-10-03T13:32:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:38 crc kubenswrapper[4861]: I1003 13:32:38.944413 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:38 crc kubenswrapper[4861]: I1003 13:32:38.944449 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:38 crc kubenswrapper[4861]: I1003 13:32:38.944462 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:38 crc kubenswrapper[4861]: I1003 13:32:38.944477 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:38 crc kubenswrapper[4861]: I1003 13:32:38.944488 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:38Z","lastTransitionTime":"2025-10-03T13:32:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:39 crc kubenswrapper[4861]: I1003 13:32:39.046669 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:39 crc kubenswrapper[4861]: I1003 13:32:39.046721 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:39 crc kubenswrapper[4861]: I1003 13:32:39.046730 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:39 crc kubenswrapper[4861]: I1003 13:32:39.046745 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:39 crc kubenswrapper[4861]: I1003 13:32:39.046754 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:39Z","lastTransitionTime":"2025-10-03T13:32:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:39 crc kubenswrapper[4861]: I1003 13:32:39.075616 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-jwgvx_f714b7db-082f-4c2c-8239-ba5df6986c13/kube-multus/0.log" Oct 03 13:32:39 crc kubenswrapper[4861]: I1003 13:32:39.075679 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-jwgvx" event={"ID":"f714b7db-082f-4c2c-8239-ba5df6986c13","Type":"ContainerStarted","Data":"428a5d17c11b15a2616655e5d2597ca82ffecd4eaecc97e33c3143839af4fda0"} Oct 03 13:32:39 crc kubenswrapper[4861]: I1003 13:32:39.089540 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"587ecce6-1ef4-4f74-a2ba-bd6e9fdb84dc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d77b51532e1ed4922634cbfc9360ac49276104c2c3ca115ea522ff423cd7bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://853fb69edcd3e4a27929ab2a6081c40f93553967619663805afb7b626f9c1e39\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75079f3e07d277ab11585e34fc72877ba93a8d0aeaa3f0c8bb214c7c14f9c1b1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\
\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfcce0420b92a42aaae0ae0e6aa26b655cd97f6ce5d45b671bbf394217027023\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cd18d89b8b81faa6f8232c33ecaec19aafc3d9574a090cbef37bcf26f83a4bf9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 13:31:47.746138 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 13:31:47.746280 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 13:31:47.747035 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2119377140/tls.crt::/tmp/serving-cert-2119377140/tls.key\\\\\\\"\\\\nI1003 13:31:48.538827 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 13:31:48.544908 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 13:31:48.544935 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 13:31:48.545220 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 13:31:48.545275 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 13:31:48.555911 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1003 13:31:48.555947 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 13:31:48.555953 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 13:31:48.555961 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 13:31:48.555964 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 13:31:48.555968 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 13:31:48.555971 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1003 13:31:48.556259 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1003 13:31:48.559989 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb1af1cb5b66706cd0a0da5a3f6b2c380a771100e61f84ca2c85c28f1878f7f6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bdf52752b6cec17ac3d33b9c94d08302ff9e7ff88ab9c23590134a36b1384af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bdf52752b6cec17ac3d33b9c94d08302ff9e7ff88ab9c23590134a36b1384af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:39Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:39 crc kubenswrapper[4861]: I1003 13:32:39.100313 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6d0e9cdc-883e-4b67-afb2-2ef5f4b3246d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01eb05b088e421c220145fd833922351aeba4a520944c6b707039785e26ef303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb9bcc84067a58db80e3c7e1b23825baeaff91f97351e9ada3765b6589fda35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80131782150ecbeb45ec2f55e86909b3735ec4f0b09e27e31f6dfc24d6d4ccd7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a40838be9fe69f9bebecff82c9f10b4c00e167b7f927682e6b18ff490bd10ad4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:39Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:39 crc kubenswrapper[4861]: I1003 13:32:39.111353 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-n974h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"25670d98-45f4-4308-9576-f6f532c422ec\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d9b2d8fb10bb6dc17ca3b4826e1e4b7e8e562e8c8745605cd332268197166b04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2cs7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase
\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:56Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-n974h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:39Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:39 crc kubenswrapper[4861]: I1003 13:32:39.123650 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-cft42" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"267128bb-f8b8-4d69-99a3-ba3af795218c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shz7z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shz7z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:32:02Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-cft42\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:39Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:39 crc kubenswrapper[4861]: I1003 13:32:39.140933 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66ed4999-426b-4615-bfb3-764a3ecc950f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a6dd78f8d0e99d19d90df2672bd0a66e48195ab147e3821b110c5b9b13fff935\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d7403a686a403bd13b8c7040a8d54e47ea882e532dbde51ff960cf2b4a7dc84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dadd381cc9fb8f216611723c7f3113272fdd37e424ab087ae2b516b1282c724\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c11d2168c2a8a146f93a9048c50a0a7da936f36039a924e2e0c946f571ac6d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://17d9e0e142062e93034c5f825e1229664112d38443d5843713cac6e077737c48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a49ed
8048fd561e10cc87dfa9b39d3ff2123f2cc65f9b4402bba6bf01d161213\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://efb31a65c06544254430413ae43161716fc307b8a95a6f42b0e5a085136f832a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://efb31a65c06544254430413ae43161716fc307b8a95a6f42b0e5a085136f832a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T13:32:20Z\\\",\\\"message\\\":\\\"ork=default : 7.45µs\\\\nI1003 13:32:20.899156 6445 services_controller.go:356] Processing sync for service openshift-cluster-machine-approver/machine-approver for network=default\\\\nI1003 13:32:20.899162 6445 services_controller.go:360] Finished syncing service machine-approver on namespace openshift-cluster-machine-approver for network=default : 5.081µs\\\\nI1003 13:32:20.899168 6445 services_controller.go:356] Processing sync for service openshift-network-operator/metrics for network=default\\\\nI1003 13:32:20.899173 6445 services_controller.go:360] Finished syncing service metrics on namespace openshift-network-operator for network=default : 4.6µs\\\\nI1003 13:32:20.899147 6445 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-route-controller-manager/route-controller-manager_TCP_cluster\\\\\\\", UUID:\\\\\\\"18746a4d-8a63-458a-b7e3-8fb89ff95fc0\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-route-controller-manager/route-controller-manager\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T13:32:19Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-5twn4_openshift-ovn-kubernetes(66ed4999-426b-4615-bfb3-764a3ecc950f)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2ee45e3f91ddde95e7bdf26aed6afb1d69eb3dbbdad136c66a51a2a3a325984\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85920cc6705b475966ceb17c3776353ec6cd31730aab339b4e5469034c49264e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://85920cc6705b475966ceb17c3776353ec6cd31730aab339b4e5469034c49264e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5twn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:39Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:39 crc kubenswrapper[4861]: I1003 13:32:39.148704 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:39 crc kubenswrapper[4861]: I1003 13:32:39.148739 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:39 crc kubenswrapper[4861]: I1003 13:32:39.148747 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:39 crc kubenswrapper[4861]: I1003 13:32:39.148763 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:39 crc kubenswrapper[4861]: I1003 13:32:39.148775 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:39Z","lastTransitionTime":"2025-10-03T13:32:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:39 crc kubenswrapper[4861]: I1003 13:32:39.153513 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"68b52998-d285-4825-b267-5023797f02b7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://32b22723ce52857bc415cc89ef0ed6c9ce091b425bc8bfba113badbda15a9c3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://023e4a3ec0b7e1c5743940abaf3884d6209e3b2aea16acb7d4224c54cbeecb5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://307fc4c2cff3f8a61720a6a33977b69811de252aa80d359754a27c2018137618\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"
cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95ae54721bb2577ab8bbcf2a0ac4893bfc1895e53ae1de248975f605729c030c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://95ae54721bb2577ab8bbcf2a0ac4893bfc1895e53ae1de248975f605729c030c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:27Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:26Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:39Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:39 crc kubenswrapper[4861]: I1003 13:32:39.167144 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:39Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:39 crc kubenswrapper[4861]: I1003 13:32:39.182011 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ceeea9a8c61928b935a6c01f2dda3f9bf0036c2c2792c9338cc580a3296285b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:39Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:39 crc kubenswrapper[4861]: I1003 13:32:39.195981 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:39Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:39 crc kubenswrapper[4861]: I1003 13:32:39.209401 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://876e806fe7d7313a700bd557fe86fe469146eeb63ecd75684c558d04f6ef5862\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:39Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:39 crc kubenswrapper[4861]: I1003 13:32:39.223275 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:39Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:39 crc kubenswrapper[4861]: I1003 13:32:39.236719 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f330284d8d5446236d2bf739c6df75969c865e304c5adab6b1ec40a92baf30d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63b55320840755d0a9c8296d19550ae1d7f5cb2f17d286dddc10a0202963bd98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:39Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:39 crc kubenswrapper[4861]: I1003 13:32:39.249707 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jwgvx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f714b7db-082f-4c2c-8239-ba5df6986c13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://428a5d17c11b15a2616655e5d2597ca82ffecd4eaecc97e33c3143839af4fda0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://163f5cadc8f9cf8082434639e5dd0dfae5cefc359dbf462b616e4dde476a309f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T13:32:37Z\\\",\\\"message\\\":\\\"2025-10-03T13:31:52+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_cae56917-e465-4c7f-ba8e-e9cb8cb67572\\\\n2025-10-03T13:31:52+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_cae56917-e465-4c7f-ba8e-e9cb8cb67572 to /host/opt/cni/bin/\\\\n2025-10-03T13:31:52Z [verbose] multus-daemon started\\\\n2025-10-03T13:31:52Z [verbose] Readiness Indicator file check\\\\n2025-10-03T13:32:37Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:32:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b76qk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jwgvx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:39Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:39 crc kubenswrapper[4861]: I1003 13:32:39.251409 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:39 crc kubenswrapper[4861]: I1003 13:32:39.251443 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:39 crc kubenswrapper[4861]: I1003 13:32:39.251451 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:39 crc kubenswrapper[4861]: I1003 13:32:39.251466 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:39 crc kubenswrapper[4861]: I1003 13:32:39.251475 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:39Z","lastTransitionTime":"2025-10-03T13:32:39Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:39 crc kubenswrapper[4861]: I1003 13:32:39.262331 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d8335d3f-417e-4114-b306-a3d8f6c31348\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c79706d97ac0c9214aee8c49206bfb27e579a82781b63cf07bd7b9dc43077402\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7prvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://871a1c47b73846e3f28db33691e75b5ed73af7287e81dae4cf2134fd827614b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7prvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-t9slw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:39Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:39 crc kubenswrapper[4861]: I1003 13:32:39.273336 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hw4vl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9671621a-5831-4fc4-8508-08b284d1cf88\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5cb2b6048f454a138caea8eca4a26c6e5cd219c9d124f46cfa69c168150b6ee4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:32:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sv2d7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53e87e91f53c3b754f21de857a8712b51063c580785de43d0c0e89f47185c549\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sv2d7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\
"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:32:01Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hw4vl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:39Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:39 crc kubenswrapper[4861]: I1003 13:32:39.288057 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wm76s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3bf3157b-44d1-4bb3-b185-71523a80c054\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8fea8b7da57798521c13d0f35905e5311cd0d8016aa20c37cc0d73c8d6fbc1a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b41a8d9dd0f504f505f1d49f5b7fa39d025f0bc4c49b19ef61691a41bd162b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b41a8d9dd0f504f505f1d49f5b7fa39d025f0bc4c49b19ef61691a41bd162b0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\
\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad87b94fe313e12f4f857a69a0a01f6f34877ded4103f2de3015b588522d9904\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad87b94fe313e12f4f857a69a0a01f6f34877ded4103f2de3015b588522d9904\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4343077b8251a65be24e9269fa38bd48d628581ef65b43f7fa262286df7b677d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4343077b8251a65be24e9269fa38bd48d628581ef65b43f7fa262286df7b677d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d10915a62560e9a9e386d4029ccd1c6cd9c99209ab32653437c32a6e5cf7e98d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81
451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d10915a62560e9a9e386d4029ccd1c6cd9c99209ab32653437c32a6e5cf7e98d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cd99ac90e9cf617307233b5899d9da44b563bd5a0969e0a64c4073ee0122b63\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cd99ac90e9cf617307233b5899d9da44b563bd5a0969e0a64c4073ee0122b63\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b20088a595b5a59d1a0339827c7dd169c479a530aed875ac3eeb021b78269490\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b20088a595b5a59d1a0339827c7dd169c479a530aed875ac3eeb021b78269490\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wm76s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:39Z is after 2025-08-24T17:21:41Z"
Oct 03 13:32:39 crc kubenswrapper[4861]: I1003 13:32:39.299415 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-c97s6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e1217d91-8c47-4353-b363-96c9de2cdb56\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70a4ac69623752a83655a58cf44ef00fbf88b0321bc83721fbbe16ea746699c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6zdw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-c97s6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:39Z is after 2025-08-24T17:21:41Z"
Oct 03 13:32:39 crc kubenswrapper[4861]: I1003 13:32:39.353171 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:39 crc kubenswrapper[4861]: I1003 13:32:39.353206 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:39 crc kubenswrapper[4861]: I1003 13:32:39.353216 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:39 crc kubenswrapper[4861]: I1003 13:32:39.353251 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:39 crc kubenswrapper[4861]: I1003 13:32:39.353263 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:39Z","lastTransitionTime":"2025-10-03T13:32:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:39 crc kubenswrapper[4861]: I1003 13:32:39.455678 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:39 crc kubenswrapper[4861]: I1003 13:32:39.455714 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:39 crc kubenswrapper[4861]: I1003 13:32:39.455723 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:39 crc kubenswrapper[4861]: I1003 13:32:39.455738 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:39 crc kubenswrapper[4861]: I1003 13:32:39.455748 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:39Z","lastTransitionTime":"2025-10-03T13:32:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:39 crc kubenswrapper[4861]: I1003 13:32:39.557910 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:39 crc kubenswrapper[4861]: I1003 13:32:39.557966 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:39 crc kubenswrapper[4861]: I1003 13:32:39.557977 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:39 crc kubenswrapper[4861]: I1003 13:32:39.557996 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:39 crc kubenswrapper[4861]: I1003 13:32:39.558008 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:39Z","lastTransitionTime":"2025-10-03T13:32:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:39 crc kubenswrapper[4861]: I1003 13:32:39.660509 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:39 crc kubenswrapper[4861]: I1003 13:32:39.660552 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:39 crc kubenswrapper[4861]: I1003 13:32:39.660562 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:39 crc kubenswrapper[4861]: I1003 13:32:39.660577 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:39 crc kubenswrapper[4861]: I1003 13:32:39.660587 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:39Z","lastTransitionTime":"2025-10-03T13:32:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:39 crc kubenswrapper[4861]: I1003 13:32:39.680897 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 03 13:32:39 crc kubenswrapper[4861]: I1003 13:32:39.680986 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 03 13:32:39 crc kubenswrapper[4861]: I1003 13:32:39.681018 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 03 13:32:39 crc kubenswrapper[4861]: E1003 13:32:39.681120 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 03 13:32:39 crc kubenswrapper[4861]: E1003 13:32:39.681192 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 03 13:32:39 crc kubenswrapper[4861]: E1003 13:32:39.681356 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 03 13:32:39 crc kubenswrapper[4861]: I1003 13:32:39.763148 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:39 crc kubenswrapper[4861]: I1003 13:32:39.763192 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:39 crc kubenswrapper[4861]: I1003 13:32:39.763203 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:39 crc kubenswrapper[4861]: I1003 13:32:39.763220 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:39 crc kubenswrapper[4861]: I1003 13:32:39.763246 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:39Z","lastTransitionTime":"2025-10-03T13:32:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:39 crc kubenswrapper[4861]: I1003 13:32:39.865862 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:39 crc kubenswrapper[4861]: I1003 13:32:39.865922 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:39 crc kubenswrapper[4861]: I1003 13:32:39.865934 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:39 crc kubenswrapper[4861]: I1003 13:32:39.865948 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:39 crc kubenswrapper[4861]: I1003 13:32:39.865959 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:39Z","lastTransitionTime":"2025-10-03T13:32:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:39 crc kubenswrapper[4861]: I1003 13:32:39.968276 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:39 crc kubenswrapper[4861]: I1003 13:32:39.968316 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:39 crc kubenswrapper[4861]: I1003 13:32:39.968326 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:39 crc kubenswrapper[4861]: I1003 13:32:39.968342 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:39 crc kubenswrapper[4861]: I1003 13:32:39.968355 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:39Z","lastTransitionTime":"2025-10-03T13:32:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:40 crc kubenswrapper[4861]: I1003 13:32:40.070669 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:40 crc kubenswrapper[4861]: I1003 13:32:40.070705 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:40 crc kubenswrapper[4861]: I1003 13:32:40.070713 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:40 crc kubenswrapper[4861]: I1003 13:32:40.070727 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:40 crc kubenswrapper[4861]: I1003 13:32:40.070737 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:40Z","lastTransitionTime":"2025-10-03T13:32:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:40 crc kubenswrapper[4861]: I1003 13:32:40.172795 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:40 crc kubenswrapper[4861]: I1003 13:32:40.172861 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:40 crc kubenswrapper[4861]: I1003 13:32:40.172876 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:40 crc kubenswrapper[4861]: I1003 13:32:40.172895 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:40 crc kubenswrapper[4861]: I1003 13:32:40.172904 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:40Z","lastTransitionTime":"2025-10-03T13:32:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:40 crc kubenswrapper[4861]: I1003 13:32:40.274989 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:40 crc kubenswrapper[4861]: I1003 13:32:40.275036 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:40 crc kubenswrapper[4861]: I1003 13:32:40.275047 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:40 crc kubenswrapper[4861]: I1003 13:32:40.275064 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:40 crc kubenswrapper[4861]: I1003 13:32:40.275075 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:40Z","lastTransitionTime":"2025-10-03T13:32:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:40 crc kubenswrapper[4861]: I1003 13:32:40.377904 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:40 crc kubenswrapper[4861]: I1003 13:32:40.377944 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:40 crc kubenswrapper[4861]: I1003 13:32:40.377953 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:40 crc kubenswrapper[4861]: I1003 13:32:40.377967 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:40 crc kubenswrapper[4861]: I1003 13:32:40.377989 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:40Z","lastTransitionTime":"2025-10-03T13:32:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:40 crc kubenswrapper[4861]: I1003 13:32:40.480224 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:40 crc kubenswrapper[4861]: I1003 13:32:40.480331 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:40 crc kubenswrapper[4861]: I1003 13:32:40.480350 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:40 crc kubenswrapper[4861]: I1003 13:32:40.480376 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:40 crc kubenswrapper[4861]: I1003 13:32:40.480384 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:40Z","lastTransitionTime":"2025-10-03T13:32:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:40 crc kubenswrapper[4861]: I1003 13:32:40.582387 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:40 crc kubenswrapper[4861]: I1003 13:32:40.582430 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:40 crc kubenswrapper[4861]: I1003 13:32:40.582443 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:40 crc kubenswrapper[4861]: I1003 13:32:40.582461 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:40 crc kubenswrapper[4861]: I1003 13:32:40.582474 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:40Z","lastTransitionTime":"2025-10-03T13:32:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:40 crc kubenswrapper[4861]: I1003 13:32:40.681029 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cft42"
Oct 03 13:32:40 crc kubenswrapper[4861]: E1003 13:32:40.681187 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-cft42" podUID="267128bb-f8b8-4d69-99a3-ba3af795218c"
Oct 03 13:32:40 crc kubenswrapper[4861]: I1003 13:32:40.685694 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:40 crc kubenswrapper[4861]: I1003 13:32:40.685729 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:40 crc kubenswrapper[4861]: I1003 13:32:40.685736 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:40 crc kubenswrapper[4861]: I1003 13:32:40.685749 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:40 crc kubenswrapper[4861]: I1003 13:32:40.685758 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:40Z","lastTransitionTime":"2025-10-03T13:32:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:40 crc kubenswrapper[4861]: I1003 13:32:40.788071 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:40 crc kubenswrapper[4861]: I1003 13:32:40.788132 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:40 crc kubenswrapper[4861]: I1003 13:32:40.788145 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:40 crc kubenswrapper[4861]: I1003 13:32:40.788163 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:40 crc kubenswrapper[4861]: I1003 13:32:40.788175 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:40Z","lastTransitionTime":"2025-10-03T13:32:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:40 crc kubenswrapper[4861]: I1003 13:32:40.890512 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:40 crc kubenswrapper[4861]: I1003 13:32:40.890546 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:40 crc kubenswrapper[4861]: I1003 13:32:40.890555 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:40 crc kubenswrapper[4861]: I1003 13:32:40.890568 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:40 crc kubenswrapper[4861]: I1003 13:32:40.890577 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:40Z","lastTransitionTime":"2025-10-03T13:32:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:40 crc kubenswrapper[4861]: I1003 13:32:40.993123 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:40 crc kubenswrapper[4861]: I1003 13:32:40.993159 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:40 crc kubenswrapper[4861]: I1003 13:32:40.993168 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:40 crc kubenswrapper[4861]: I1003 13:32:40.993181 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:40 crc kubenswrapper[4861]: I1003 13:32:40.993189 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:40Z","lastTransitionTime":"2025-10-03T13:32:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:41 crc kubenswrapper[4861]: I1003 13:32:41.095262 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:41 crc kubenswrapper[4861]: I1003 13:32:41.095334 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:41 crc kubenswrapper[4861]: I1003 13:32:41.095346 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:41 crc kubenswrapper[4861]: I1003 13:32:41.095361 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:41 crc kubenswrapper[4861]: I1003 13:32:41.095372 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:41Z","lastTransitionTime":"2025-10-03T13:32:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:41 crc kubenswrapper[4861]: I1003 13:32:41.197649 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:41 crc kubenswrapper[4861]: I1003 13:32:41.197699 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:41 crc kubenswrapper[4861]: I1003 13:32:41.197708 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:41 crc kubenswrapper[4861]: I1003 13:32:41.197724 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:41 crc kubenswrapper[4861]: I1003 13:32:41.197732 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:41Z","lastTransitionTime":"2025-10-03T13:32:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:41 crc kubenswrapper[4861]: I1003 13:32:41.300078 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:41 crc kubenswrapper[4861]: I1003 13:32:41.300134 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:41 crc kubenswrapper[4861]: I1003 13:32:41.300145 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:41 crc kubenswrapper[4861]: I1003 13:32:41.300164 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:41 crc kubenswrapper[4861]: I1003 13:32:41.300175 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:41Z","lastTransitionTime":"2025-10-03T13:32:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:41 crc kubenswrapper[4861]: I1003 13:32:41.402469 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:41 crc kubenswrapper[4861]: I1003 13:32:41.402503 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:41 crc kubenswrapper[4861]: I1003 13:32:41.402512 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:41 crc kubenswrapper[4861]: I1003 13:32:41.402527 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:41 crc kubenswrapper[4861]: I1003 13:32:41.402535 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:41Z","lastTransitionTime":"2025-10-03T13:32:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:41 crc kubenswrapper[4861]: I1003 13:32:41.504523 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:41 crc kubenswrapper[4861]: I1003 13:32:41.504574 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:41 crc kubenswrapper[4861]: I1003 13:32:41.504589 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:41 crc kubenswrapper[4861]: I1003 13:32:41.504609 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:41 crc kubenswrapper[4861]: I1003 13:32:41.504621 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:41Z","lastTransitionTime":"2025-10-03T13:32:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:41 crc kubenswrapper[4861]: I1003 13:32:41.607275 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:41 crc kubenswrapper[4861]: I1003 13:32:41.607312 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:41 crc kubenswrapper[4861]: I1003 13:32:41.607322 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:41 crc kubenswrapper[4861]: I1003 13:32:41.607336 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:41 crc kubenswrapper[4861]: I1003 13:32:41.607347 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:41Z","lastTransitionTime":"2025-10-03T13:32:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:41 crc kubenswrapper[4861]: I1003 13:32:41.680974 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 03 13:32:41 crc kubenswrapper[4861]: I1003 13:32:41.681043 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 03 13:32:41 crc kubenswrapper[4861]: I1003 13:32:41.681045 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 03 13:32:41 crc kubenswrapper[4861]: E1003 13:32:41.681115 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 03 13:32:41 crc kubenswrapper[4861]: E1003 13:32:41.681294 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 03 13:32:41 crc kubenswrapper[4861]: E1003 13:32:41.681345 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 03 13:32:41 crc kubenswrapper[4861]: I1003 13:32:41.710210 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:41 crc kubenswrapper[4861]: I1003 13:32:41.710292 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:41 crc kubenswrapper[4861]: I1003 13:32:41.710305 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:41 crc kubenswrapper[4861]: I1003 13:32:41.710323 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:41 crc kubenswrapper[4861]: I1003 13:32:41.710335 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:41Z","lastTransitionTime":"2025-10-03T13:32:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:41 crc kubenswrapper[4861]: I1003 13:32:41.812492 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:41 crc kubenswrapper[4861]: I1003 13:32:41.813132 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:41 crc kubenswrapper[4861]: I1003 13:32:41.813161 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:41 crc kubenswrapper[4861]: I1003 13:32:41.813179 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:41 crc kubenswrapper[4861]: I1003 13:32:41.813189 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:41Z","lastTransitionTime":"2025-10-03T13:32:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:41 crc kubenswrapper[4861]: I1003 13:32:41.915723 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:41 crc kubenswrapper[4861]: I1003 13:32:41.915789 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:41 crc kubenswrapper[4861]: I1003 13:32:41.915808 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:41 crc kubenswrapper[4861]: I1003 13:32:41.915828 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:41 crc kubenswrapper[4861]: I1003 13:32:41.915839 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:41Z","lastTransitionTime":"2025-10-03T13:32:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:42 crc kubenswrapper[4861]: I1003 13:32:42.018573 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:42 crc kubenswrapper[4861]: I1003 13:32:42.018611 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:42 crc kubenswrapper[4861]: I1003 13:32:42.018620 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:42 crc kubenswrapper[4861]: I1003 13:32:42.018635 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:42 crc kubenswrapper[4861]: I1003 13:32:42.018643 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:42Z","lastTransitionTime":"2025-10-03T13:32:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:42 crc kubenswrapper[4861]: I1003 13:32:42.121477 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:42 crc kubenswrapper[4861]: I1003 13:32:42.121561 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:42 crc kubenswrapper[4861]: I1003 13:32:42.121574 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:42 crc kubenswrapper[4861]: I1003 13:32:42.121596 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:42 crc kubenswrapper[4861]: I1003 13:32:42.121620 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:42Z","lastTransitionTime":"2025-10-03T13:32:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:42 crc kubenswrapper[4861]: I1003 13:32:42.223509 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:42 crc kubenswrapper[4861]: I1003 13:32:42.223547 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:42 crc kubenswrapper[4861]: I1003 13:32:42.223556 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:42 crc kubenswrapper[4861]: I1003 13:32:42.223571 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:42 crc kubenswrapper[4861]: I1003 13:32:42.223581 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:42Z","lastTransitionTime":"2025-10-03T13:32:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:42 crc kubenswrapper[4861]: I1003 13:32:42.326154 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:42 crc kubenswrapper[4861]: I1003 13:32:42.326246 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:42 crc kubenswrapper[4861]: I1003 13:32:42.326260 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:42 crc kubenswrapper[4861]: I1003 13:32:42.326281 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:42 crc kubenswrapper[4861]: I1003 13:32:42.326300 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:42Z","lastTransitionTime":"2025-10-03T13:32:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:42 crc kubenswrapper[4861]: I1003 13:32:42.429204 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:42 crc kubenswrapper[4861]: I1003 13:32:42.429251 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:42 crc kubenswrapper[4861]: I1003 13:32:42.429274 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:42 crc kubenswrapper[4861]: I1003 13:32:42.429294 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:42 crc kubenswrapper[4861]: I1003 13:32:42.429303 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:42Z","lastTransitionTime":"2025-10-03T13:32:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:42 crc kubenswrapper[4861]: I1003 13:32:42.531994 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:42 crc kubenswrapper[4861]: I1003 13:32:42.532039 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:42 crc kubenswrapper[4861]: I1003 13:32:42.532049 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:42 crc kubenswrapper[4861]: I1003 13:32:42.532064 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:42 crc kubenswrapper[4861]: I1003 13:32:42.532075 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:42Z","lastTransitionTime":"2025-10-03T13:32:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:42 crc kubenswrapper[4861]: I1003 13:32:42.634725 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:42 crc kubenswrapper[4861]: I1003 13:32:42.634764 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:42 crc kubenswrapper[4861]: I1003 13:32:42.634775 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:42 crc kubenswrapper[4861]: I1003 13:32:42.634790 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:42 crc kubenswrapper[4861]: I1003 13:32:42.634800 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:42Z","lastTransitionTime":"2025-10-03T13:32:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:42 crc kubenswrapper[4861]: I1003 13:32:42.680809 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cft42"
Oct 03 13:32:42 crc kubenswrapper[4861]: E1003 13:32:42.680990 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-cft42" podUID="267128bb-f8b8-4d69-99a3-ba3af795218c"
Oct 03 13:32:42 crc kubenswrapper[4861]: I1003 13:32:42.736681 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:42 crc kubenswrapper[4861]: I1003 13:32:42.736723 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:42 crc kubenswrapper[4861]: I1003 13:32:42.736733 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:42 crc kubenswrapper[4861]: I1003 13:32:42.736747 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:42 crc kubenswrapper[4861]: I1003 13:32:42.736757 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:42Z","lastTransitionTime":"2025-10-03T13:32:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:42 crc kubenswrapper[4861]: I1003 13:32:42.839381 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:42 crc kubenswrapper[4861]: I1003 13:32:42.839416 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:42 crc kubenswrapper[4861]: I1003 13:32:42.839425 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:42 crc kubenswrapper[4861]: I1003 13:32:42.839440 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:42 crc kubenswrapper[4861]: I1003 13:32:42.839450 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:42Z","lastTransitionTime":"2025-10-03T13:32:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:42 crc kubenswrapper[4861]: I1003 13:32:42.942049 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:42 crc kubenswrapper[4861]: I1003 13:32:42.942118 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:42 crc kubenswrapper[4861]: I1003 13:32:42.942145 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:42 crc kubenswrapper[4861]: I1003 13:32:42.942176 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:42 crc kubenswrapper[4861]: I1003 13:32:42.942197 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:42Z","lastTransitionTime":"2025-10-03T13:32:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:43 crc kubenswrapper[4861]: I1003 13:32:43.045935 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:43 crc kubenswrapper[4861]: I1003 13:32:43.045974 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:43 crc kubenswrapper[4861]: I1003 13:32:43.045982 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:43 crc kubenswrapper[4861]: I1003 13:32:43.045998 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:43 crc kubenswrapper[4861]: I1003 13:32:43.046008 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:43Z","lastTransitionTime":"2025-10-03T13:32:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:43 crc kubenswrapper[4861]: I1003 13:32:43.148211 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:43 crc kubenswrapper[4861]: I1003 13:32:43.148282 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:43 crc kubenswrapper[4861]: I1003 13:32:43.148295 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:43 crc kubenswrapper[4861]: I1003 13:32:43.148309 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:43 crc kubenswrapper[4861]: I1003 13:32:43.148321 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:43Z","lastTransitionTime":"2025-10-03T13:32:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:43 crc kubenswrapper[4861]: I1003 13:32:43.250628 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:43 crc kubenswrapper[4861]: I1003 13:32:43.250661 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:43 crc kubenswrapper[4861]: I1003 13:32:43.250672 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:43 crc kubenswrapper[4861]: I1003 13:32:43.250687 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:43 crc kubenswrapper[4861]: I1003 13:32:43.250697 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:43Z","lastTransitionTime":"2025-10-03T13:32:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:43 crc kubenswrapper[4861]: I1003 13:32:43.353347 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:43 crc kubenswrapper[4861]: I1003 13:32:43.353374 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:43 crc kubenswrapper[4861]: I1003 13:32:43.353381 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:43 crc kubenswrapper[4861]: I1003 13:32:43.353396 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:43 crc kubenswrapper[4861]: I1003 13:32:43.353403 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:43Z","lastTransitionTime":"2025-10-03T13:32:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:43 crc kubenswrapper[4861]: I1003 13:32:43.456454 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:43 crc kubenswrapper[4861]: I1003 13:32:43.456486 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:43 crc kubenswrapper[4861]: I1003 13:32:43.456495 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:43 crc kubenswrapper[4861]: I1003 13:32:43.456510 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:43 crc kubenswrapper[4861]: I1003 13:32:43.456519 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:43Z","lastTransitionTime":"2025-10-03T13:32:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:43 crc kubenswrapper[4861]: I1003 13:32:43.558912 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:43 crc kubenswrapper[4861]: I1003 13:32:43.558962 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:43 crc kubenswrapper[4861]: I1003 13:32:43.558976 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:43 crc kubenswrapper[4861]: I1003 13:32:43.558993 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:43 crc kubenswrapper[4861]: I1003 13:32:43.559004 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:43Z","lastTransitionTime":"2025-10-03T13:32:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:43 crc kubenswrapper[4861]: I1003 13:32:43.661824 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:43 crc kubenswrapper[4861]: I1003 13:32:43.661873 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:43 crc kubenswrapper[4861]: I1003 13:32:43.661884 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:43 crc kubenswrapper[4861]: I1003 13:32:43.661902 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:43 crc kubenswrapper[4861]: I1003 13:32:43.661915 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:43Z","lastTransitionTime":"2025-10-03T13:32:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:43 crc kubenswrapper[4861]: I1003 13:32:43.680768 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 03 13:32:43 crc kubenswrapper[4861]: I1003 13:32:43.680781 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 03 13:32:43 crc kubenswrapper[4861]: E1003 13:32:43.680919 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 03 13:32:43 crc kubenswrapper[4861]: E1003 13:32:43.681017 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 03 13:32:43 crc kubenswrapper[4861]: I1003 13:32:43.680781 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 03 13:32:43 crc kubenswrapper[4861]: E1003 13:32:43.681105 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 03 13:32:43 crc kubenswrapper[4861]: I1003 13:32:43.764929 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:43 crc kubenswrapper[4861]: I1003 13:32:43.764967 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:43 crc kubenswrapper[4861]: I1003 13:32:43.764977 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:43 crc kubenswrapper[4861]: I1003 13:32:43.764991 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:43 crc kubenswrapper[4861]: I1003 13:32:43.765000 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:43Z","lastTransitionTime":"2025-10-03T13:32:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:43 crc kubenswrapper[4861]: I1003 13:32:43.868023 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:43 crc kubenswrapper[4861]: I1003 13:32:43.868060 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:43 crc kubenswrapper[4861]: I1003 13:32:43.868071 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:43 crc kubenswrapper[4861]: I1003 13:32:43.868086 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:43 crc kubenswrapper[4861]: I1003 13:32:43.868096 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:43Z","lastTransitionTime":"2025-10-03T13:32:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:43 crc kubenswrapper[4861]: I1003 13:32:43.970348 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:43 crc kubenswrapper[4861]: I1003 13:32:43.970389 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:43 crc kubenswrapper[4861]: I1003 13:32:43.970401 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:43 crc kubenswrapper[4861]: I1003 13:32:43.970416 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:43 crc kubenswrapper[4861]: I1003 13:32:43.970426 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:43Z","lastTransitionTime":"2025-10-03T13:32:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:44 crc kubenswrapper[4861]: I1003 13:32:44.072446 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:44 crc kubenswrapper[4861]: I1003 13:32:44.072503 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:44 crc kubenswrapper[4861]: I1003 13:32:44.072516 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:44 crc kubenswrapper[4861]: I1003 13:32:44.072532 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:44 crc kubenswrapper[4861]: I1003 13:32:44.072544 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:44Z","lastTransitionTime":"2025-10-03T13:32:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:44 crc kubenswrapper[4861]: I1003 13:32:44.175915 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:44 crc kubenswrapper[4861]: I1003 13:32:44.175952 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:44 crc kubenswrapper[4861]: I1003 13:32:44.176012 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:44 crc kubenswrapper[4861]: I1003 13:32:44.176027 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:44 crc kubenswrapper[4861]: I1003 13:32:44.176038 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:44Z","lastTransitionTime":"2025-10-03T13:32:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:44 crc kubenswrapper[4861]: I1003 13:32:44.278489 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:44 crc kubenswrapper[4861]: I1003 13:32:44.278536 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:44 crc kubenswrapper[4861]: I1003 13:32:44.278547 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:44 crc kubenswrapper[4861]: I1003 13:32:44.278565 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:44 crc kubenswrapper[4861]: I1003 13:32:44.278577 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:44Z","lastTransitionTime":"2025-10-03T13:32:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:44 crc kubenswrapper[4861]: I1003 13:32:44.380682 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:44 crc kubenswrapper[4861]: I1003 13:32:44.380718 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:44 crc kubenswrapper[4861]: I1003 13:32:44.380730 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:44 crc kubenswrapper[4861]: I1003 13:32:44.380746 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:44 crc kubenswrapper[4861]: I1003 13:32:44.380758 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:44Z","lastTransitionTime":"2025-10-03T13:32:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:44 crc kubenswrapper[4861]: I1003 13:32:44.483053 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:44 crc kubenswrapper[4861]: I1003 13:32:44.483362 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:44 crc kubenswrapper[4861]: I1003 13:32:44.483386 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:44 crc kubenswrapper[4861]: I1003 13:32:44.483403 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:44 crc kubenswrapper[4861]: I1003 13:32:44.483414 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:44Z","lastTransitionTime":"2025-10-03T13:32:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:44 crc kubenswrapper[4861]: I1003 13:32:44.585412 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:44 crc kubenswrapper[4861]: I1003 13:32:44.585478 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:44 crc kubenswrapper[4861]: I1003 13:32:44.585495 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:44 crc kubenswrapper[4861]: I1003 13:32:44.585510 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:44 crc kubenswrapper[4861]: I1003 13:32:44.585520 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:44Z","lastTransitionTime":"2025-10-03T13:32:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:44 crc kubenswrapper[4861]: I1003 13:32:44.680405 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cft42" Oct 03 13:32:44 crc kubenswrapper[4861]: E1003 13:32:44.680531 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-cft42" podUID="267128bb-f8b8-4d69-99a3-ba3af795218c" Oct 03 13:32:44 crc kubenswrapper[4861]: I1003 13:32:44.687798 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:44 crc kubenswrapper[4861]: I1003 13:32:44.687835 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:44 crc kubenswrapper[4861]: I1003 13:32:44.687844 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:44 crc kubenswrapper[4861]: I1003 13:32:44.687857 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:44 crc kubenswrapper[4861]: I1003 13:32:44.687866 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:44Z","lastTransitionTime":"2025-10-03T13:32:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:44 crc kubenswrapper[4861]: I1003 13:32:44.790171 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:44 crc kubenswrapper[4861]: I1003 13:32:44.790201 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:44 crc kubenswrapper[4861]: I1003 13:32:44.790214 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:44 crc kubenswrapper[4861]: I1003 13:32:44.790253 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:44 crc kubenswrapper[4861]: I1003 13:32:44.790263 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:44Z","lastTransitionTime":"2025-10-03T13:32:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:44 crc kubenswrapper[4861]: I1003 13:32:44.893456 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:44 crc kubenswrapper[4861]: I1003 13:32:44.893526 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:44 crc kubenswrapper[4861]: I1003 13:32:44.893550 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:44 crc kubenswrapper[4861]: I1003 13:32:44.893578 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:44 crc kubenswrapper[4861]: I1003 13:32:44.893595 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:44Z","lastTransitionTime":"2025-10-03T13:32:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:44 crc kubenswrapper[4861]: I1003 13:32:44.996687 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:44 crc kubenswrapper[4861]: I1003 13:32:44.996738 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:44 crc kubenswrapper[4861]: I1003 13:32:44.996751 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:44 crc kubenswrapper[4861]: I1003 13:32:44.996770 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:44 crc kubenswrapper[4861]: I1003 13:32:44.996793 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:44Z","lastTransitionTime":"2025-10-03T13:32:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:45 crc kubenswrapper[4861]: I1003 13:32:45.099143 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:45 crc kubenswrapper[4861]: I1003 13:32:45.099206 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:45 crc kubenswrapper[4861]: I1003 13:32:45.099221 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:45 crc kubenswrapper[4861]: I1003 13:32:45.099264 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:45 crc kubenswrapper[4861]: I1003 13:32:45.099278 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:45Z","lastTransitionTime":"2025-10-03T13:32:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:45 crc kubenswrapper[4861]: I1003 13:32:45.174943 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:45 crc kubenswrapper[4861]: I1003 13:32:45.175020 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:45 crc kubenswrapper[4861]: I1003 13:32:45.175050 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:45 crc kubenswrapper[4861]: I1003 13:32:45.175069 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:45 crc kubenswrapper[4861]: I1003 13:32:45.175084 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:45Z","lastTransitionTime":"2025-10-03T13:32:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:45 crc kubenswrapper[4861]: E1003 13:32:45.186454 4861 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:32:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:32:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:45Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:32:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:32:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:45Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9c733c76-1447-4e10-91a4-f1aaa7de6132\\\",\\\"systemUUID\\\":\\\"5c5136c5-33d2-4bef-9fd7-5251914e4451\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:45Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:45 crc kubenswrapper[4861]: I1003 13:32:45.190380 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:45 crc kubenswrapper[4861]: I1003 13:32:45.190412 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 03 13:32:45 crc kubenswrapper[4861]: I1003 13:32:45.190420 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:45 crc kubenswrapper[4861]: I1003 13:32:45.190432 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:45 crc kubenswrapper[4861]: I1003 13:32:45.190441 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:45Z","lastTransitionTime":"2025-10-03T13:32:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:45 crc kubenswrapper[4861]: E1003 13:32:45.208784 4861 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:32:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:32:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:45Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:32:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:32:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:45Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9c733c76-1447-4e10-91a4-f1aaa7de6132\\\",\\\"systemUUID\\\":\\\"5c5136c5-33d2-4bef-9fd7-5251914e4451\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:45Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:45 crc kubenswrapper[4861]: I1003 13:32:45.213806 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:45 crc kubenswrapper[4861]: I1003 13:32:45.213860 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 03 13:32:45 crc kubenswrapper[4861]: I1003 13:32:45.213873 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:45 crc kubenswrapper[4861]: I1003 13:32:45.213894 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:45 crc kubenswrapper[4861]: I1003 13:32:45.213907 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:45Z","lastTransitionTime":"2025-10-03T13:32:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:45 crc kubenswrapper[4861]: E1003 13:32:45.228826 4861 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:32:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:32:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:45Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:32:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:32:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:45Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9c733c76-1447-4e10-91a4-f1aaa7de6132\\\",\\\"systemUUID\\\":\\\"5c5136c5-33d2-4bef-9fd7-5251914e4451\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:45Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:45 crc kubenswrapper[4861]: I1003 13:32:45.233490 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:45 crc kubenswrapper[4861]: I1003 13:32:45.233516 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 03 13:32:45 crc kubenswrapper[4861]: I1003 13:32:45.233524 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:45 crc kubenswrapper[4861]: I1003 13:32:45.233537 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:45 crc kubenswrapper[4861]: I1003 13:32:45.233546 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:45Z","lastTransitionTime":"2025-10-03T13:32:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:45 crc kubenswrapper[4861]: E1003 13:32:45.245631 4861 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:32:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:32:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:45Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:32:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:32:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:45Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9c733c76-1447-4e10-91a4-f1aaa7de6132\\\",\\\"systemUUID\\\":\\\"5c5136c5-33d2-4bef-9fd7-5251914e4451\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:45Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:45 crc kubenswrapper[4861]: I1003 13:32:45.249701 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:45 crc kubenswrapper[4861]: I1003 13:32:45.249747 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 03 13:32:45 crc kubenswrapper[4861]: I1003 13:32:45.249759 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:45 crc kubenswrapper[4861]: I1003 13:32:45.249775 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:45 crc kubenswrapper[4861]: I1003 13:32:45.249787 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:45Z","lastTransitionTime":"2025-10-03T13:32:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:45 crc kubenswrapper[4861]: E1003 13:32:45.262059 4861 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:32:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:32:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:45Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:32:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T13:32:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:45Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9c733c76-1447-4e10-91a4-f1aaa7de6132\\\",\\\"systemUUID\\\":\\\"5c5136c5-33d2-4bef-9fd7-5251914e4451\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:45Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:45 crc kubenswrapper[4861]: E1003 13:32:45.262190 4861 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 03 13:32:45 crc kubenswrapper[4861]: I1003 13:32:45.264122 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory"
Oct 03 13:32:45 crc kubenswrapper[4861]: I1003 13:32:45.264176 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:45 crc kubenswrapper[4861]: I1003 13:32:45.264186 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:45 crc kubenswrapper[4861]: I1003 13:32:45.264198 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:45 crc kubenswrapper[4861]: I1003 13:32:45.264207 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:45Z","lastTransitionTime":"2025-10-03T13:32:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:45 crc kubenswrapper[4861]: I1003 13:32:45.367417 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:45 crc kubenswrapper[4861]: I1003 13:32:45.367740 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:45 crc kubenswrapper[4861]: I1003 13:32:45.367839 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:45 crc kubenswrapper[4861]: I1003 13:32:45.367931 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:45 crc kubenswrapper[4861]: I1003 13:32:45.368020 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:45Z","lastTransitionTime":"2025-10-03T13:32:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:45 crc kubenswrapper[4861]: I1003 13:32:45.471101 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:45 crc kubenswrapper[4861]: I1003 13:32:45.471124 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:45 crc kubenswrapper[4861]: I1003 13:32:45.471148 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:45 crc kubenswrapper[4861]: I1003 13:32:45.471161 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:45 crc kubenswrapper[4861]: I1003 13:32:45.471171 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:45Z","lastTransitionTime":"2025-10-03T13:32:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:45 crc kubenswrapper[4861]: I1003 13:32:45.574042 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:45 crc kubenswrapper[4861]: I1003 13:32:45.574089 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:45 crc kubenswrapper[4861]: I1003 13:32:45.574101 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:45 crc kubenswrapper[4861]: I1003 13:32:45.574118 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:45 crc kubenswrapper[4861]: I1003 13:32:45.574131 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:45Z","lastTransitionTime":"2025-10-03T13:32:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:45 crc kubenswrapper[4861]: I1003 13:32:45.676931 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:45 crc kubenswrapper[4861]: I1003 13:32:45.676971 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:45 crc kubenswrapper[4861]: I1003 13:32:45.676981 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:45 crc kubenswrapper[4861]: I1003 13:32:45.676994 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:45 crc kubenswrapper[4861]: I1003 13:32:45.677003 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:45Z","lastTransitionTime":"2025-10-03T13:32:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:45 crc kubenswrapper[4861]: I1003 13:32:45.680360 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 03 13:32:45 crc kubenswrapper[4861]: I1003 13:32:45.680359 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 03 13:32:45 crc kubenswrapper[4861]: E1003 13:32:45.680496 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 03 13:32:45 crc kubenswrapper[4861]: E1003 13:32:45.680564 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 03 13:32:45 crc kubenswrapper[4861]: I1003 13:32:45.680397 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 03 13:32:45 crc kubenswrapper[4861]: E1003 13:32:45.680631 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 03 13:32:45 crc kubenswrapper[4861]: I1003 13:32:45.778956 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:45 crc kubenswrapper[4861]: I1003 13:32:45.779005 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:45 crc kubenswrapper[4861]: I1003 13:32:45.779015 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:45 crc kubenswrapper[4861]: I1003 13:32:45.779029 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:45 crc kubenswrapper[4861]: I1003 13:32:45.779040 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:45Z","lastTransitionTime":"2025-10-03T13:32:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:45 crc kubenswrapper[4861]: I1003 13:32:45.881142 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:45 crc kubenswrapper[4861]: I1003 13:32:45.881413 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:45 crc kubenswrapper[4861]: I1003 13:32:45.881503 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:45 crc kubenswrapper[4861]: I1003 13:32:45.881606 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:45 crc kubenswrapper[4861]: I1003 13:32:45.881687 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:45Z","lastTransitionTime":"2025-10-03T13:32:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:45 crc kubenswrapper[4861]: I1003 13:32:45.993969 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:45 crc kubenswrapper[4861]: I1003 13:32:45.994026 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:45 crc kubenswrapper[4861]: I1003 13:32:45.994039 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:45 crc kubenswrapper[4861]: I1003 13:32:45.994054 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:45 crc kubenswrapper[4861]: I1003 13:32:45.994065 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:45Z","lastTransitionTime":"2025-10-03T13:32:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:46 crc kubenswrapper[4861]: I1003 13:32:46.096777 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:46 crc kubenswrapper[4861]: I1003 13:32:46.096821 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:46 crc kubenswrapper[4861]: I1003 13:32:46.096830 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:46 crc kubenswrapper[4861]: I1003 13:32:46.096844 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:46 crc kubenswrapper[4861]: I1003 13:32:46.096854 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:46Z","lastTransitionTime":"2025-10-03T13:32:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:46 crc kubenswrapper[4861]: I1003 13:32:46.198893 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:46 crc kubenswrapper[4861]: I1003 13:32:46.198915 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:46 crc kubenswrapper[4861]: I1003 13:32:46.198923 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:46 crc kubenswrapper[4861]: I1003 13:32:46.198935 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:46 crc kubenswrapper[4861]: I1003 13:32:46.198943 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:46Z","lastTransitionTime":"2025-10-03T13:32:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:46 crc kubenswrapper[4861]: I1003 13:32:46.300707 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:46 crc kubenswrapper[4861]: I1003 13:32:46.300745 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:46 crc kubenswrapper[4861]: I1003 13:32:46.300758 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:46 crc kubenswrapper[4861]: I1003 13:32:46.300772 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:46 crc kubenswrapper[4861]: I1003 13:32:46.300784 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:46Z","lastTransitionTime":"2025-10-03T13:32:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:46 crc kubenswrapper[4861]: I1003 13:32:46.403253 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:46 crc kubenswrapper[4861]: I1003 13:32:46.403298 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:46 crc kubenswrapper[4861]: I1003 13:32:46.403315 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:46 crc kubenswrapper[4861]: I1003 13:32:46.403332 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:46 crc kubenswrapper[4861]: I1003 13:32:46.403344 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:46Z","lastTransitionTime":"2025-10-03T13:32:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:46 crc kubenswrapper[4861]: I1003 13:32:46.505652 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:46 crc kubenswrapper[4861]: I1003 13:32:46.505708 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:46 crc kubenswrapper[4861]: I1003 13:32:46.505720 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:46 crc kubenswrapper[4861]: I1003 13:32:46.505758 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:46 crc kubenswrapper[4861]: I1003 13:32:46.505771 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:46Z","lastTransitionTime":"2025-10-03T13:32:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:46 crc kubenswrapper[4861]: I1003 13:32:46.608164 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:46 crc kubenswrapper[4861]: I1003 13:32:46.608503 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:46 crc kubenswrapper[4861]: I1003 13:32:46.608610 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:46 crc kubenswrapper[4861]: I1003 13:32:46.608691 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:46 crc kubenswrapper[4861]: I1003 13:32:46.608764 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:46Z","lastTransitionTime":"2025-10-03T13:32:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:46 crc kubenswrapper[4861]: I1003 13:32:46.680297 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cft42"
Oct 03 13:32:46 crc kubenswrapper[4861]: E1003 13:32:46.680446 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
pod="openshift-multus/network-metrics-daemon-cft42" podUID="267128bb-f8b8-4d69-99a3-ba3af795218c" Oct 03 13:32:46 crc kubenswrapper[4861]: I1003 13:32:46.692285 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://876e806fe7d7313a700bd557fe86fe469146eeb63ecd75684c558d04f6ef5862\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:46Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:46 crc kubenswrapper[4861]: I1003 13:32:46.710253 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66ed4999-426b-4615-bfb3-764a3ecc950f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a6dd78f8d0e99d19d90df2672bd0a66e48195ab147e3821b110c5b9b13fff935\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d7403a686a403bd13b8c7040a8d54e47ea882e532dbde51ff960cf2b4a7dc84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dadd381cc9fb8f216611723c7f3113272fdd37e424ab087ae2b516b1282c724\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c11d2168c2a8a146f93a9048c50a0a7da936f36039a924e2e0c946f571ac6d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://17d9e0e142062e93034c5f825e1229664112d38443d5843713cac6e077737c48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a49ed8048fd561e10cc87dfa9b39d3ff2123f2cc65f9b4402bba6bf01d161213\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://efb31a65c06544254430413ae43161716fc307b8a95a6f42b0e5a085136f832a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://efb31a65c06544254430413ae43161716fc307b8a95a6f42b0e5a085136f832a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T13:32:20Z\\\",\\\"message\\\":\\\"ork=default : 7.45µs\\\\nI1003 13:32:20.899156 6445 services_controller.go:356] Processing sync for service openshift-cluster-machine-approver/machine-approver for network=default\\\\nI1003 13:32:20.899162 6445 services_controller.go:360] Finished syncing service machine-approver on namespace openshift-cluster-machine-approver for network=default : 5.081µs\\\\nI1003 13:32:20.899168 6445 services_controller.go:356] Processing sync for service openshift-network-operator/metrics for network=default\\\\nI1003 13:32:20.899173 6445 services_controller.go:360] Finished syncing service metrics on namespace openshift-network-operator for network=default : 4.6µs\\\\nI1003 13:32:20.899147 6445 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-route-controller-manager/route-controller-manager_TCP_cluster\\\\\\\", UUID:\\\\\\\"18746a4d-8a63-458a-b7e3-8fb89ff95fc0\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-route-controller-manager/route-controller-manager\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T13:32:19Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-5twn4_openshift-ovn-kubernetes(66ed4999-426b-4615-bfb3-764a3ecc950f)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2ee45e3f91ddde95e7bdf26aed6afb1d69eb3dbbdad136c66a51a2a3a325984\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85920cc6705b475966ceb17c3776353ec6cd31730aab339b4e5469034c49264e\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://85920cc6705b475966ceb17c3776353ec6cd31730aab339b4e5469034c49264e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwnjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5twn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:46Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:46 crc kubenswrapper[4861]: I1003 13:32:46.711725 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:46 crc kubenswrapper[4861]: I1003 13:32:46.711751 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:46 crc kubenswrapper[4861]: I1003 13:32:46.711785 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:46 crc kubenswrapper[4861]: I1003 13:32:46.711802 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:46 crc kubenswrapper[4861]: I1003 13:32:46.711814 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:46Z","lastTransitionTime":"2025-10-03T13:32:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:46 crc kubenswrapper[4861]: I1003 13:32:46.721592 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"68b52998-d285-4825-b267-5023797f02b7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://32b22723ce52857bc415cc89ef0ed6c9ce091b425bc8bfba113badbda15a9c3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://023e4a3ec0b7e1c5743940abaf3884d6209e3b2aea16acb7d4224c54cbeecb5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://307fc4c2cff3f8a61720a6a33977b69811de252aa80d359754a27c2018137618\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"
cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95ae54721bb2577ab8bbcf2a0ac4893bfc1895e53ae1de248975f605729c030c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://95ae54721bb2577ab8bbcf2a0ac4893bfc1895e53ae1de248975f605729c030c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:27Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:26Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:46Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:46 crc kubenswrapper[4861]: I1003 13:32:46.734919 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:46Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:46 crc kubenswrapper[4861]: I1003 13:32:46.745751 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ceeea9a8c61928b935a6c01f2dda3f9bf0036c2c2792c9338cc580a3296285b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:46Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:46 crc kubenswrapper[4861]: I1003 13:32:46.756654 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:46Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:46 crc kubenswrapper[4861]: I1003 13:32:46.768047 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:46Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:46 crc kubenswrapper[4861]: I1003 13:32:46.782348 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f330284d8d5446236d2bf739c6df75969c865e304c5adab6b1ec40a92baf30d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63b55320840755d0a9c8296d19550ae1d7f5cb2f17d286dddc10a0202963bd98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imag
eID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:46Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:46 crc kubenswrapper[4861]: I1003 13:32:46.795659 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jwgvx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f714b7db-082f-4c2c-8239-ba5df6986c13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://428a5d17c11b15a2616655e5d2597ca82ffecd4eaecc97e33c3143839af4fda0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://163f5cadc8f9cf8082434639e5dd0dfae5cefc359dbf462b616e4dde476a309f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T13:32:37Z\\\",\\\"message\\\":\\\"2025-10-03T13:31:52+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_cae56917-e465-4c7f-ba8e-e9cb8cb67572\\\\n2025-10-03T13:31:52+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_cae56917-e465-4c7f-ba8e-e9cb8cb67572 to /host/opt/cni/bin/\\\\n2025-10-03T13:31:52Z [verbose] multus-daemon started\\\\n2025-10-03T13:31:52Z [verbose] Readiness Indicator file check\\\\n2025-10-03T13:32:37Z [error] have you checked that your default network is ready? 
still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:32:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b76qk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jwgvx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:46Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:46 crc kubenswrapper[4861]: I1003 13:32:46.805111 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d8335d3f-417e-4114-b306-a3d8f6c31348\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c79706d97ac0c9214aee8c49206bfb27e579a82781b63cf07bd7b9dc43077402\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7prvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://871a1c47b73846e3f28db33691e75b5ed73af7287e81dae4cf2134fd827614b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7prvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-t9slw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:46Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:46 crc kubenswrapper[4861]: I1003 13:32:46.813472 4861 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:46 crc kubenswrapper[4861]: I1003 13:32:46.813523 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:46 crc kubenswrapper[4861]: I1003 13:32:46.813533 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:46 crc kubenswrapper[4861]: I1003 13:32:46.813546 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:46 crc kubenswrapper[4861]: I1003 13:32:46.813556 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:46Z","lastTransitionTime":"2025-10-03T13:32:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:46 crc kubenswrapper[4861]: I1003 13:32:46.815845 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hw4vl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9671621a-5831-4fc4-8508-08b284d1cf88\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5cb2b6048f454a138caea8eca4a26c6e5cd219c9d124f46cfa69c168150b6ee4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:32:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sv2d7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53e87e91f53c3b754f21de857a8712b51063c580785de43d0c0e89f47185c549\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha2
56:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sv2d7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:32:01Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hw4vl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:46Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:46 crc kubenswrapper[4861]: I1003 13:32:46.829258 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wm76s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3bf3157b-44d1-4bb3-b185-71523a80c054\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8fea8b7da57798521c13d0f35905e5311cd0d8016aa20c37cc0d73c8d6fbc1a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b41a8d9dd0f504f505f1d49f5b7fa39d025f0bc4c49b19ef61691a41bd162b0\\\",\\\"image\\\":\\\"quay.io/openshift-r
elease-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b41a8d9dd0f504f505f1d49f5b7fa39d025f0bc4c49b19ef61691a41bd162b0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad87b94fe313e12f4f857a69a0a01f6f34877ded4103f2de3015b588522d9904\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad87b94fe313e12f4f857a69a0a01f6f34877ded4103f2de3015b588522d9904\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4343077b8251a65be24e9269fa38bd48d628581ef65b43f7fa262286df7b677d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4343077b8251a65be24e9269fa38bd48d628581ef65b43f7fa262286df7b677d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/b
in\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d10915a62560e9a9e386d4029ccd1c6cd9c99209ab32653437c32a6e5cf7e98d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d10915a62560e9a9e386d4029ccd1c6cd9c99209ab32653437c32a6e5cf7e98d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cd99ac90e9cf617307233b5899d9da44b563bd5a0969e0a64c4073ee0122b63\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cd99ac90e9cf617307233b5899d9da44b563bd5a0969e0a64c4073ee0122b63\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b20088a595b5a59d1a0339827c7dd169c479a530aed875ac3eeb021b78269490\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b20088a595b5
a59d1a0339827c7dd169c479a530aed875ac3eeb021b78269490\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvshn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wm76s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:46Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:46 crc kubenswrapper[4861]: I1003 13:32:46.840034 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-c97s6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e1217d91-8c47-4353-b363-96c9de2cdb56\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70a4ac69623752a83655a58cf44ef00fbf88b0321bc83721fbbe16ea746699c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6zdw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-c97s6\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:46Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:46 crc kubenswrapper[4861]: I1003 13:32:46.851995 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"587ecce6-1ef4-4f74-a2ba-bd6e9fdb84dc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d77b51532e1ed4922634cbfc9360ac49276104c2c3ca115ea522ff423cd7bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://853fb69edcd3e4a27929ab2a6081c40f93553967619663805afb7b626f9c1e39\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75079f3e07d277ab11585e34fc72877ba93a8d0aeaa3f0c8bb214c7c14f9c1b1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\
\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfcce0420b92a42aaae0ae0e6aa26b655cd97f6ce5d45b671bbf394217027023\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cd18d89b8b81faa6f8232c33ecaec19aafc3d9574a090cbef37bcf26f83a4bf9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T13:31:48Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 13:31:47.746138 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 13:31:47.746280 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 13:31:47.747035 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2119377140/tls.crt::/tmp/serving-cert-2119377140/tls.key\\\\\\\"\\\\nI1003 13:31:48.538827 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 13:31:48.544908 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 13:31:48.544935 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 13:31:48.545220 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 13:31:48.545275 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 13:31:48.555911 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1003 13:31:48.555947 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 13:31:48.555953 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 13:31:48.555961 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 13:31:48.555964 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 13:31:48.555968 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 13:31:48.555971 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1003 13:31:48.556259 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1003 13:31:48.559989 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb1af1cb5b66706cd0a0da5a3f6b2c380a771100e61f84ca2c85c28f1878f7f6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bdf52752b6cec17ac3d33b9c94d08302ff9e7ff88ab9c23590134a36b1384af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bdf52752b6cec17ac3d33b9c94d08302ff9e7ff88ab9c23590134a36b1384af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T13:31:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T13:31:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:46Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:46 crc kubenswrapper[4861]: I1003 13:32:46.862612 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6d0e9cdc-883e-4b67-afb2-2ef5f4b3246d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01eb05b088e421c220145fd833922351aeba4a520944c6b707039785e26ef303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb9bcc84067a58db80e3c7e1b23825baeaff91f97351e9ada3765b6589fda35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80131782150ecbeb45ec2f55e86909b3735ec4f0b09e27e31f6dfc24d6d4ccd7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a40838be9fe69f9bebecff82c9f10b4c00e167b7f927682e6b18ff490bd10ad4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:46Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:46 crc kubenswrapper[4861]: I1003 13:32:46.872323 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-n974h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"25670d98-45f4-4308-9576-f6f532c422ec\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d9b2d8fb10bb6dc17ca3b4826e1e4b7e8e562e8c8745605cd332268197166b04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T13:31:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2cs7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase
\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:31:56Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-n974h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:46Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:46 crc kubenswrapper[4861]: I1003 13:32:46.881899 4861 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-cft42" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"267128bb-f8b8-4d69-99a3-ba3af795218c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T13:32:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shz7z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shz7z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T13:32:02Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-cft42\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T13:32:46Z is after 2025-08-24T17:21:41Z" Oct 03 13:32:46 crc kubenswrapper[4861]: I1003 13:32:46.915691 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:46 crc kubenswrapper[4861]: I1003 13:32:46.915747 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:46 crc kubenswrapper[4861]: I1003 13:32:46.915756 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:46 crc kubenswrapper[4861]: I1003 13:32:46.915771 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:46 crc kubenswrapper[4861]: I1003 13:32:46.915801 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:46Z","lastTransitionTime":"2025-10-03T13:32:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:47 crc kubenswrapper[4861]: I1003 13:32:47.018004 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:47 crc kubenswrapper[4861]: I1003 13:32:47.018031 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:47 crc kubenswrapper[4861]: I1003 13:32:47.018041 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:47 crc kubenswrapper[4861]: I1003 13:32:47.018259 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:47 crc kubenswrapper[4861]: I1003 13:32:47.018271 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:47Z","lastTransitionTime":"2025-10-03T13:32:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:47 crc kubenswrapper[4861]: I1003 13:32:47.121164 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:47 crc kubenswrapper[4861]: I1003 13:32:47.121216 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:47 crc kubenswrapper[4861]: I1003 13:32:47.121248 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:47 crc kubenswrapper[4861]: I1003 13:32:47.121265 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:47 crc kubenswrapper[4861]: I1003 13:32:47.121276 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:47Z","lastTransitionTime":"2025-10-03T13:32:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:47 crc kubenswrapper[4861]: I1003 13:32:47.224108 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:47 crc kubenswrapper[4861]: I1003 13:32:47.224157 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:47 crc kubenswrapper[4861]: I1003 13:32:47.224167 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:47 crc kubenswrapper[4861]: I1003 13:32:47.224180 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:47 crc kubenswrapper[4861]: I1003 13:32:47.224190 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:47Z","lastTransitionTime":"2025-10-03T13:32:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:47 crc kubenswrapper[4861]: I1003 13:32:47.326087 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:47 crc kubenswrapper[4861]: I1003 13:32:47.326136 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:47 crc kubenswrapper[4861]: I1003 13:32:47.326168 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:47 crc kubenswrapper[4861]: I1003 13:32:47.326182 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:47 crc kubenswrapper[4861]: I1003 13:32:47.326191 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:47Z","lastTransitionTime":"2025-10-03T13:32:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:47 crc kubenswrapper[4861]: I1003 13:32:47.428151 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:47 crc kubenswrapper[4861]: I1003 13:32:47.428218 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:47 crc kubenswrapper[4861]: I1003 13:32:47.428273 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:47 crc kubenswrapper[4861]: I1003 13:32:47.428291 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:47 crc kubenswrapper[4861]: I1003 13:32:47.428301 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:47Z","lastTransitionTime":"2025-10-03T13:32:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:47 crc kubenswrapper[4861]: I1003 13:32:47.530939 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:47 crc kubenswrapper[4861]: I1003 13:32:47.530988 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:47 crc kubenswrapper[4861]: I1003 13:32:47.531004 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:47 crc kubenswrapper[4861]: I1003 13:32:47.531021 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:47 crc kubenswrapper[4861]: I1003 13:32:47.531034 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:47Z","lastTransitionTime":"2025-10-03T13:32:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:47 crc kubenswrapper[4861]: I1003 13:32:47.633768 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:47 crc kubenswrapper[4861]: I1003 13:32:47.633823 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:47 crc kubenswrapper[4861]: I1003 13:32:47.633841 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:47 crc kubenswrapper[4861]: I1003 13:32:47.633873 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:47 crc kubenswrapper[4861]: I1003 13:32:47.633890 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:47Z","lastTransitionTime":"2025-10-03T13:32:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:47 crc kubenswrapper[4861]: I1003 13:32:47.680877 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 13:32:47 crc kubenswrapper[4861]: I1003 13:32:47.680918 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 13:32:47 crc kubenswrapper[4861]: I1003 13:32:47.680931 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 13:32:47 crc kubenswrapper[4861]: E1003 13:32:47.681015 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 13:32:47 crc kubenswrapper[4861]: E1003 13:32:47.681161 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 13:32:47 crc kubenswrapper[4861]: E1003 13:32:47.681224 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 13:32:47 crc kubenswrapper[4861]: I1003 13:32:47.736253 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:47 crc kubenswrapper[4861]: I1003 13:32:47.736294 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:47 crc kubenswrapper[4861]: I1003 13:32:47.736306 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:47 crc kubenswrapper[4861]: I1003 13:32:47.736321 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:47 crc kubenswrapper[4861]: I1003 13:32:47.736332 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:47Z","lastTransitionTime":"2025-10-03T13:32:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:47 crc kubenswrapper[4861]: I1003 13:32:47.838685 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:47 crc kubenswrapper[4861]: I1003 13:32:47.838729 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:47 crc kubenswrapper[4861]: I1003 13:32:47.838738 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:47 crc kubenswrapper[4861]: I1003 13:32:47.838751 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:47 crc kubenswrapper[4861]: I1003 13:32:47.838760 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:47Z","lastTransitionTime":"2025-10-03T13:32:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:47 crc kubenswrapper[4861]: I1003 13:32:47.941683 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:47 crc kubenswrapper[4861]: I1003 13:32:47.941725 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:47 crc kubenswrapper[4861]: I1003 13:32:47.941736 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:47 crc kubenswrapper[4861]: I1003 13:32:47.941750 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:47 crc kubenswrapper[4861]: I1003 13:32:47.941759 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:47Z","lastTransitionTime":"2025-10-03T13:32:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:48 crc kubenswrapper[4861]: I1003 13:32:48.044447 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:48 crc kubenswrapper[4861]: I1003 13:32:48.044491 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:48 crc kubenswrapper[4861]: I1003 13:32:48.044500 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:48 crc kubenswrapper[4861]: I1003 13:32:48.044513 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:48 crc kubenswrapper[4861]: I1003 13:32:48.044523 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:48Z","lastTransitionTime":"2025-10-03T13:32:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:48 crc kubenswrapper[4861]: I1003 13:32:48.146196 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:48 crc kubenswrapper[4861]: I1003 13:32:48.146241 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:48 crc kubenswrapper[4861]: I1003 13:32:48.146253 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:48 crc kubenswrapper[4861]: I1003 13:32:48.146269 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:48 crc kubenswrapper[4861]: I1003 13:32:48.146280 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:48Z","lastTransitionTime":"2025-10-03T13:32:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:48 crc kubenswrapper[4861]: I1003 13:32:48.248534 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:48 crc kubenswrapper[4861]: I1003 13:32:48.248565 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:48 crc kubenswrapper[4861]: I1003 13:32:48.248573 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:48 crc kubenswrapper[4861]: I1003 13:32:48.248585 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:48 crc kubenswrapper[4861]: I1003 13:32:48.248595 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:48Z","lastTransitionTime":"2025-10-03T13:32:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:48 crc kubenswrapper[4861]: I1003 13:32:48.350744 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:48 crc kubenswrapper[4861]: I1003 13:32:48.350776 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:48 crc kubenswrapper[4861]: I1003 13:32:48.350794 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:48 crc kubenswrapper[4861]: I1003 13:32:48.350808 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:48 crc kubenswrapper[4861]: I1003 13:32:48.350818 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:48Z","lastTransitionTime":"2025-10-03T13:32:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:48 crc kubenswrapper[4861]: I1003 13:32:48.453074 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:48 crc kubenswrapper[4861]: I1003 13:32:48.453105 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:48 crc kubenswrapper[4861]: I1003 13:32:48.453114 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:48 crc kubenswrapper[4861]: I1003 13:32:48.453126 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:48 crc kubenswrapper[4861]: I1003 13:32:48.453135 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:48Z","lastTransitionTime":"2025-10-03T13:32:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:48 crc kubenswrapper[4861]: I1003 13:32:48.555425 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:48 crc kubenswrapper[4861]: I1003 13:32:48.555466 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:48 crc kubenswrapper[4861]: I1003 13:32:48.555476 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:48 crc kubenswrapper[4861]: I1003 13:32:48.555491 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:48 crc kubenswrapper[4861]: I1003 13:32:48.555501 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:48Z","lastTransitionTime":"2025-10-03T13:32:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:48 crc kubenswrapper[4861]: I1003 13:32:48.657602 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:48 crc kubenswrapper[4861]: I1003 13:32:48.657644 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:48 crc kubenswrapper[4861]: I1003 13:32:48.657655 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:48 crc kubenswrapper[4861]: I1003 13:32:48.657674 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:48 crc kubenswrapper[4861]: I1003 13:32:48.657686 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:48Z","lastTransitionTime":"2025-10-03T13:32:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:48 crc kubenswrapper[4861]: I1003 13:32:48.681159 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cft42" Oct 03 13:32:48 crc kubenswrapper[4861]: E1003 13:32:48.681331 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-cft42" podUID="267128bb-f8b8-4d69-99a3-ba3af795218c" Oct 03 13:32:48 crc kubenswrapper[4861]: I1003 13:32:48.759687 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:48 crc kubenswrapper[4861]: I1003 13:32:48.759720 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:48 crc kubenswrapper[4861]: I1003 13:32:48.759728 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:48 crc kubenswrapper[4861]: I1003 13:32:48.759741 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:48 crc kubenswrapper[4861]: I1003 13:32:48.759749 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:48Z","lastTransitionTime":"2025-10-03T13:32:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:48 crc kubenswrapper[4861]: I1003 13:32:48.862483 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:48 crc kubenswrapper[4861]: I1003 13:32:48.862531 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:48 crc kubenswrapper[4861]: I1003 13:32:48.862544 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:48 crc kubenswrapper[4861]: I1003 13:32:48.862561 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:48 crc kubenswrapper[4861]: I1003 13:32:48.862573 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:48Z","lastTransitionTime":"2025-10-03T13:32:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:48 crc kubenswrapper[4861]: I1003 13:32:48.965267 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:48 crc kubenswrapper[4861]: I1003 13:32:48.965308 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:48 crc kubenswrapper[4861]: I1003 13:32:48.965322 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:48 crc kubenswrapper[4861]: I1003 13:32:48.965339 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:48 crc kubenswrapper[4861]: I1003 13:32:48.965352 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:48Z","lastTransitionTime":"2025-10-03T13:32:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:49 crc kubenswrapper[4861]: I1003 13:32:49.067820 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:49 crc kubenswrapper[4861]: I1003 13:32:49.068037 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:49 crc kubenswrapper[4861]: I1003 13:32:49.068128 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:49 crc kubenswrapper[4861]: I1003 13:32:49.068204 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:49 crc kubenswrapper[4861]: I1003 13:32:49.068298 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:49Z","lastTransitionTime":"2025-10-03T13:32:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:49 crc kubenswrapper[4861]: I1003 13:32:49.170542 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:49 crc kubenswrapper[4861]: I1003 13:32:49.170813 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:49 crc kubenswrapper[4861]: I1003 13:32:49.170906 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:49 crc kubenswrapper[4861]: I1003 13:32:49.171011 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:49 crc kubenswrapper[4861]: I1003 13:32:49.171123 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:49Z","lastTransitionTime":"2025-10-03T13:32:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:49 crc kubenswrapper[4861]: I1003 13:32:49.273388 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:49 crc kubenswrapper[4861]: I1003 13:32:49.273425 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:49 crc kubenswrapper[4861]: I1003 13:32:49.273441 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:49 crc kubenswrapper[4861]: I1003 13:32:49.273456 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:49 crc kubenswrapper[4861]: I1003 13:32:49.273466 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:49Z","lastTransitionTime":"2025-10-03T13:32:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:49 crc kubenswrapper[4861]: I1003 13:32:49.375157 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:49 crc kubenswrapper[4861]: I1003 13:32:49.375190 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:49 crc kubenswrapper[4861]: I1003 13:32:49.375199 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:49 crc kubenswrapper[4861]: I1003 13:32:49.375211 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:49 crc kubenswrapper[4861]: I1003 13:32:49.375221 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:49Z","lastTransitionTime":"2025-10-03T13:32:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:49 crc kubenswrapper[4861]: I1003 13:32:49.477798 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:49 crc kubenswrapper[4861]: I1003 13:32:49.477831 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:49 crc kubenswrapper[4861]: I1003 13:32:49.477847 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:49 crc kubenswrapper[4861]: I1003 13:32:49.477863 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:49 crc kubenswrapper[4861]: I1003 13:32:49.477876 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:49Z","lastTransitionTime":"2025-10-03T13:32:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:49 crc kubenswrapper[4861]: I1003 13:32:49.580753 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:49 crc kubenswrapper[4861]: I1003 13:32:49.580804 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:49 crc kubenswrapper[4861]: I1003 13:32:49.580815 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:49 crc kubenswrapper[4861]: I1003 13:32:49.580832 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:49 crc kubenswrapper[4861]: I1003 13:32:49.580846 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:49Z","lastTransitionTime":"2025-10-03T13:32:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:49 crc kubenswrapper[4861]: I1003 13:32:49.680023 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 13:32:49 crc kubenswrapper[4861]: E1003 13:32:49.680378 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 13:32:49 crc kubenswrapper[4861]: I1003 13:32:49.680135 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 13:32:49 crc kubenswrapper[4861]: E1003 13:32:49.680622 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 13:32:49 crc kubenswrapper[4861]: I1003 13:32:49.680070 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 13:32:49 crc kubenswrapper[4861]: E1003 13:32:49.680806 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 13:32:49 crc kubenswrapper[4861]: I1003 13:32:49.682780 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:49 crc kubenswrapper[4861]: I1003 13:32:49.682819 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:49 crc kubenswrapper[4861]: I1003 13:32:49.682828 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:49 crc kubenswrapper[4861]: I1003 13:32:49.682842 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:49 crc kubenswrapper[4861]: I1003 13:32:49.682851 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:49Z","lastTransitionTime":"2025-10-03T13:32:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:49 crc kubenswrapper[4861]: I1003 13:32:49.785431 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:49 crc kubenswrapper[4861]: I1003 13:32:49.785459 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:49 crc kubenswrapper[4861]: I1003 13:32:49.785467 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:49 crc kubenswrapper[4861]: I1003 13:32:49.785481 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:49 crc kubenswrapper[4861]: I1003 13:32:49.785490 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:49Z","lastTransitionTime":"2025-10-03T13:32:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Oct 03 13:32:49 crc kubenswrapper[4861]: I1003 13:32:49.888050 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 13:32:49 crc kubenswrapper[4861]: I1003 13:32:49.888532 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 13:32:49 crc kubenswrapper[4861]: I1003 13:32:49.888627 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 13:32:49 crc kubenswrapper[4861]: I1003 13:32:49.888734 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 13:32:49 crc kubenswrapper[4861]: I1003 13:32:49.888805 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:49Z","lastTransitionTime":"2025-10-03T13:32:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 13:32:50 crc kubenswrapper[4861]: I1003 13:32:50.681470 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cft42"
Oct 03 13:32:50 crc kubenswrapper[4861]: E1003 13:32:50.681987 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-cft42" podUID="267128bb-f8b8-4d69-99a3-ba3af795218c"
pod="openshift-multus/network-metrics-daemon-cft42" podUID="267128bb-f8b8-4d69-99a3-ba3af795218c" Oct 03 13:32:50 crc kubenswrapper[4861]: I1003 13:32:50.682325 4861 scope.go:117] "RemoveContainer" containerID="efb31a65c06544254430413ae43161716fc307b8a95a6f42b0e5a085136f832a" Oct 03 13:32:50 crc kubenswrapper[4861]: I1003 13:32:50.713297 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:50 crc kubenswrapper[4861]: I1003 13:32:50.713338 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:50 crc kubenswrapper[4861]: I1003 13:32:50.713350 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:50 crc kubenswrapper[4861]: I1003 13:32:50.713363 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:50 crc kubenswrapper[4861]: I1003 13:32:50.713372 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:50Z","lastTransitionTime":"2025-10-03T13:32:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:50 crc kubenswrapper[4861]: I1003 13:32:50.817027 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:50 crc kubenswrapper[4861]: I1003 13:32:50.817069 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:50 crc kubenswrapper[4861]: I1003 13:32:50.817089 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:50 crc kubenswrapper[4861]: I1003 13:32:50.817105 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:50 crc kubenswrapper[4861]: I1003 13:32:50.817116 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:50Z","lastTransitionTime":"2025-10-03T13:32:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Oct 03 13:32:51 crc kubenswrapper[4861]: I1003 13:32:51.585145 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 03 13:32:51 crc kubenswrapper[4861]: E1003 13:32:51.585269 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 13:33:55.585250936 +0000 UTC m=+149.583235983 (durationBeforeRetry 1m4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 03 13:32:51 crc kubenswrapper[4861]: I1003 13:32:51.585295 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 03 13:32:51 crc kubenswrapper[4861]: I1003 13:32:51.585330 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 03 13:32:51 crc kubenswrapper[4861]: I1003 13:32:51.585349 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 03 13:32:51 crc kubenswrapper[4861]: I1003 13:32:51.585364 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 03 13:32:51 crc kubenswrapper[4861]: E1003 13:32:51.585445 4861 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered
Oct 03 13:32:51 crc kubenswrapper[4861]: E1003 13:32:51.585483 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-03 13:33:55.585474842 +0000 UTC m=+149.583459889 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered
Oct 03 13:32:51 crc kubenswrapper[4861]: E1003 13:32:51.585445 4861 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered
Oct 03 13:32:51 crc kubenswrapper[4861]: E1003 13:32:51.585530 4861 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Oct 03 13:32:51 crc kubenswrapper[4861]: E1003 13:32:51.585544 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-03 13:33:55.585535323 +0000 UTC m=+149.583520370 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered
Oct 03 13:32:51 crc kubenswrapper[4861]: E1003 13:32:51.585552 4861 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Oct 03 13:32:51 crc kubenswrapper[4861]: E1003 13:32:51.585565 4861 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Oct 03 13:32:51 crc kubenswrapper[4861]: E1003 13:32:51.585567 4861 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Oct 03 13:32:51 crc kubenswrapper[4861]: E1003 13:32:51.585596 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-03 13:33:55.585586465 +0000 UTC m=+149.583571512 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Oct 03 13:32:51 crc kubenswrapper[4861]: E1003 13:32:51.585601 4861 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Oct 03 13:32:51 crc kubenswrapper[4861]: E1003 13:32:51.585617 4861 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Oct 03 13:32:51 crc kubenswrapper[4861]: E1003 13:32:51.585678 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-03 13:33:55.585660576 +0000 UTC m=+149.583645723 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Oct 03 13:32:51 crc kubenswrapper[4861]: I1003 13:32:51.681045 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 03 13:32:51 crc kubenswrapper[4861]: E1003 13:32:51.681442 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 03 13:32:51 crc kubenswrapper[4861]: I1003 13:32:51.681454 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 03 13:32:51 crc kubenswrapper[4861]: I1003 13:32:51.681487 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 03 13:32:51 crc kubenswrapper[4861]: E1003 13:32:51.681510 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 03 13:32:51 crc kubenswrapper[4861]: E1003 13:32:51.681601 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 03 13:32:51 crc kubenswrapper[4861]: I1003 13:32:51.855000 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-cft42"]
Oct 03 13:32:51 crc kubenswrapper[4861]: I1003 13:32:51.855113 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cft42"
Oct 03 13:32:51 crc kubenswrapper[4861]: E1003 13:32:51.855198 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-cft42" podUID="267128bb-f8b8-4d69-99a3-ba3af795218c"
Oct 03 13:32:52 crc kubenswrapper[4861]: I1003 13:32:52.111687 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5twn4_66ed4999-426b-4615-bfb3-764a3ecc950f/ovnkube-controller/2.log"
Oct 03 13:32:52 crc kubenswrapper[4861]: I1003 13:32:52.114393 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" event={"ID":"66ed4999-426b-4615-bfb3-764a3ecc950f","Type":"ContainerStarted","Data":"c4d5354382b50370a550e49f6c3deafd2a393871bb8b60c462fecdc244249ea0"}
Oct 03 13:32:52 crc kubenswrapper[4861]: I1003 13:32:52.114812 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-5twn4"
Oct 03 13:32:52 crc kubenswrapper[4861]: I1003 13:32:52.171513 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podStartSLOduration=64.171492966 podStartE2EDuration="1m4.171492966s" podCreationTimestamp="2025-10-03 13:31:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:32:52.151501604 +0000 UTC m=+86.149486651" watchObservedRunningTime="2025-10-03 13:32:52.171492966 +0000 UTC m=+86.169478013"
Oct 03 13:32:52 crc kubenswrapper[4861]: I1003 13:32:52.171668 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hw4vl" podStartSLOduration=64.17166544 podStartE2EDuration="1m4.17166544s" podCreationTimestamp="2025-10-03 13:31:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:32:52.171385243 +0000 UTC m=+86.169370310" watchObservedRunningTime="2025-10-03 13:32:52.17166544 +0000 UTC m=+86.169650477"
Oct 03 13:32:52 crc kubenswrapper[4861]: I1003 13:32:52.259958 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-wm76s" podStartSLOduration=64.259941567 podStartE2EDuration="1m4.259941567s" podCreationTimestamp="2025-10-03 13:31:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:32:52.259327792 +0000 UTC m=+86.257312839" watchObservedRunningTime="2025-10-03 13:32:52.259941567 +0000 UTC m=+86.257926614"
Oct 03 13:32:52 crc kubenswrapper[4861]: I1003 13:32:52.260090 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-jwgvx" podStartSLOduration=64.26008502 podStartE2EDuration="1m4.26008502s" podCreationTimestamp="2025-10-03 13:31:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:32:52.230904914 +0000 UTC m=+86.228889961" watchObservedRunningTime="2025-10-03 13:32:52.26008502 +0000 UTC m=+86.258070067"
Oct 03 13:32:52 crc kubenswrapper[4861]: I1003 13:32:52.285213 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-c97s6" podStartSLOduration=64.285194023 podStartE2EDuration="1m4.285194023s" podCreationTimestamp="2025-10-03 13:31:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:32:52.274520069 +0000 UTC m=+86.272505116" watchObservedRunningTime="2025-10-03 13:32:52.285194023 +0000 UTC m=+86.283179070"
Oct 03 13:32:52 crc kubenswrapper[4861]: I1003 13:32:52.322818 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=64.322796624 podStartE2EDuration="1m4.322796624s" podCreationTimestamp="2025-10-03 13:31:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:32:52.304158658 +0000 UTC m=+86.302143705" watchObservedRunningTime="2025-10-03 13:32:52.322796624 +0000 UTC m=+86.320781681"
Oct 03 13:32:52 crc kubenswrapper[4861]: I1003 13:32:52.337452 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=61.337430748 podStartE2EDuration="1m1.337430748s" podCreationTimestamp="2025-10-03 13:31:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:32:52.32303014 +0000 UTC m=+86.321015197" watchObservedRunningTime="2025-10-03 13:32:52.337430748 +0000 UTC m=+86.335415795"
Oct 03 13:32:52 crc kubenswrapper[4861]: I1003 13:32:52.337876 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-n974h" podStartSLOduration=64.33786949 podStartE2EDuration="1m4.33786949s" podCreationTimestamp="2025-10-03 13:31:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:32:52.337810618 +0000 UTC m=+86.335795665" watchObservedRunningTime="2025-10-03 13:32:52.33786949 +0000 UTC m=+86.335854537"
Oct 03 13:32:52 crc kubenswrapper[4861]: I1003 13:32:52.423429 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" podStartSLOduration=64.423406847 podStartE2EDuration="1m4.423406847s" podCreationTimestamp="2025-10-03 13:31:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:32:52.421177919 +0000 UTC m=+86.419162986" watchObservedRunningTime="2025-10-03 13:32:52.423406847 +0000 UTC m=+86.421391894"
Oct 03 13:32:52 crc kubenswrapper[4861]: I1003 13:32:52.437828 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=30.437811515 podStartE2EDuration="30.437811515s" podCreationTimestamp="2025-10-03 13:32:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:32:52.436718816 +0000 UTC m=+86.434703863" watchObservedRunningTime="2025-10-03 13:32:52.437811515 +0000 UTC m=+86.435796562"
Oct 03 13:32:53 crc kubenswrapper[4861]: I1003 13:32:53.680992 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 03 13:32:53 crc kubenswrapper[4861]: E1003 13:32:53.681126 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 03 13:32:53 crc kubenswrapper[4861]: I1003 13:32:53.681363 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cft42"
Oct 03 13:32:53 crc kubenswrapper[4861]: E1003 13:32:53.681446 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-cft42" podUID="267128bb-f8b8-4d69-99a3-ba3af795218c"
Oct 03 13:32:53 crc kubenswrapper[4861]: I1003 13:32:53.681589 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 03 13:32:53 crc kubenswrapper[4861]: E1003 13:32:53.681653 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 03 13:32:53 crc kubenswrapper[4861]: I1003 13:32:53.681776 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 03 13:32:53 crc kubenswrapper[4861]: E1003 13:32:53.681844 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Has your network provider started?"} Oct 03 13:32:53 crc kubenswrapper[4861]: I1003 13:32:53.793630 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:53 crc kubenswrapper[4861]: I1003 13:32:53.793718 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:53 crc kubenswrapper[4861]: I1003 13:32:53.793736 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:53 crc kubenswrapper[4861]: I1003 13:32:53.793766 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:53 crc kubenswrapper[4861]: I1003 13:32:53.793786 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:53Z","lastTransitionTime":"2025-10-03T13:32:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:53 crc kubenswrapper[4861]: I1003 13:32:53.895832 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:53 crc kubenswrapper[4861]: I1003 13:32:53.895886 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:53 crc kubenswrapper[4861]: I1003 13:32:53.895899 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:53 crc kubenswrapper[4861]: I1003 13:32:53.895917 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:53 crc kubenswrapper[4861]: I1003 13:32:53.895931 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:53Z","lastTransitionTime":"2025-10-03T13:32:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:53 crc kubenswrapper[4861]: I1003 13:32:53.998436 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:53 crc kubenswrapper[4861]: I1003 13:32:53.998487 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:53 crc kubenswrapper[4861]: I1003 13:32:53.998498 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:53 crc kubenswrapper[4861]: I1003 13:32:53.998512 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:53 crc kubenswrapper[4861]: I1003 13:32:53.998523 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:53Z","lastTransitionTime":"2025-10-03T13:32:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:54 crc kubenswrapper[4861]: I1003 13:32:54.101475 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:54 crc kubenswrapper[4861]: I1003 13:32:54.101518 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:54 crc kubenswrapper[4861]: I1003 13:32:54.101534 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:54 crc kubenswrapper[4861]: I1003 13:32:54.101551 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:54 crc kubenswrapper[4861]: I1003 13:32:54.101564 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:54Z","lastTransitionTime":"2025-10-03T13:32:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:54 crc kubenswrapper[4861]: I1003 13:32:54.204170 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:54 crc kubenswrapper[4861]: I1003 13:32:54.204207 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:54 crc kubenswrapper[4861]: I1003 13:32:54.204218 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:54 crc kubenswrapper[4861]: I1003 13:32:54.204255 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:54 crc kubenswrapper[4861]: I1003 13:32:54.204267 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:54Z","lastTransitionTime":"2025-10-03T13:32:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:54 crc kubenswrapper[4861]: I1003 13:32:54.306836 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:54 crc kubenswrapper[4861]: I1003 13:32:54.306947 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:54 crc kubenswrapper[4861]: I1003 13:32:54.306960 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:54 crc kubenswrapper[4861]: I1003 13:32:54.306980 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:54 crc kubenswrapper[4861]: I1003 13:32:54.306996 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:54Z","lastTransitionTime":"2025-10-03T13:32:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:54 crc kubenswrapper[4861]: I1003 13:32:54.409933 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:54 crc kubenswrapper[4861]: I1003 13:32:54.410005 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:54 crc kubenswrapper[4861]: I1003 13:32:54.410026 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:54 crc kubenswrapper[4861]: I1003 13:32:54.410069 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:54 crc kubenswrapper[4861]: I1003 13:32:54.410091 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:54Z","lastTransitionTime":"2025-10-03T13:32:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:54 crc kubenswrapper[4861]: I1003 13:32:54.512529 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:54 crc kubenswrapper[4861]: I1003 13:32:54.512596 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:54 crc kubenswrapper[4861]: I1003 13:32:54.512603 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:54 crc kubenswrapper[4861]: I1003 13:32:54.512618 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:54 crc kubenswrapper[4861]: I1003 13:32:54.512628 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:54Z","lastTransitionTime":"2025-10-03T13:32:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:54 crc kubenswrapper[4861]: I1003 13:32:54.615318 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:54 crc kubenswrapper[4861]: I1003 13:32:54.615369 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:54 crc kubenswrapper[4861]: I1003 13:32:54.615385 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:54 crc kubenswrapper[4861]: I1003 13:32:54.615405 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:54 crc kubenswrapper[4861]: I1003 13:32:54.615418 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:54Z","lastTransitionTime":"2025-10-03T13:32:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:54 crc kubenswrapper[4861]: I1003 13:32:54.719031 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:54 crc kubenswrapper[4861]: I1003 13:32:54.719100 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:54 crc kubenswrapper[4861]: I1003 13:32:54.719123 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:54 crc kubenswrapper[4861]: I1003 13:32:54.719151 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:54 crc kubenswrapper[4861]: I1003 13:32:54.719174 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:54Z","lastTransitionTime":"2025-10-03T13:32:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:54 crc kubenswrapper[4861]: I1003 13:32:54.821662 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:54 crc kubenswrapper[4861]: I1003 13:32:54.821694 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:54 crc kubenswrapper[4861]: I1003 13:32:54.821703 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:54 crc kubenswrapper[4861]: I1003 13:32:54.821718 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:54 crc kubenswrapper[4861]: I1003 13:32:54.821727 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:54Z","lastTransitionTime":"2025-10-03T13:32:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 13:32:54 crc kubenswrapper[4861]: I1003 13:32:54.923731 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:54 crc kubenswrapper[4861]: I1003 13:32:54.923800 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:54 crc kubenswrapper[4861]: I1003 13:32:54.923813 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:54 crc kubenswrapper[4861]: I1003 13:32:54.923836 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 13:32:54 crc kubenswrapper[4861]: I1003 13:32:54.923851 4861 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T13:32:54Z","lastTransitionTime":"2025-10-03T13:32:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.025883 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.025943 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.025954 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.025965 4861 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.026066 4861 kubelet_node_status.go:538] "Fast updating node status as it just became ready" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.083062 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-rg4gb"] Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.083908 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-rg4gb" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.089279 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.089567 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.089605 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.089583 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.089766 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.090347 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.091980 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-fv562"] Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.092431 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-fv562" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.092767 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-654v6"] Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.093178 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-654v6" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.094456 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-4znl7"] Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.095061 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-4znl7" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.095383 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-4ggnn"] Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.097048 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-4ggnn" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.097893 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-8tkv7"] Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.099286 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-v6tz9"] Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.100391 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-8tkv7" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.100682 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-v6tz9" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.101567 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.103384 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.105327 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-l9srr"] Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.105783 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-l9srr" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.108017 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-r4wck"] Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.108790 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-r4wck" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.120007 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.120130 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.120385 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.120503 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.120631 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.120755 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.121044 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.121170 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.121201 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.121318 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.121396 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.121466 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-gjvs9"] Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.122093 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gjvs9" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.121491 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.123339 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.121524 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.123455 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.121568 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.121596 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.121658 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.121689 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.121692 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.122649 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.122707 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.122748 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.122793 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.122850 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.124694 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-zjlzh"] Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.124897 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.125648 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-zjlzh" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.125761 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.126275 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.127074 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.127368 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-8kth8"] Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.139547 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.145091 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.145683 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.153148 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.159562 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.159777 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.160871 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-rkmmm"] Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.161896 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.162172 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.164882 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-8gdwj"] Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.165296 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-rkmmm" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.167204 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-8gdwj" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.174300 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.176782 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.175686 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-5p9tt"] Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.187316 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-xv8s4"] Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.187664 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-77qvr"] Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.187967 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-bbq9v"] Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.188347 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-htfhq"] Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.188654 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-htfhq" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.178312 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8kth8" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.188883 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-t6vlm"] Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.189088 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-5p9tt" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.189301 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-xv8s4" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.189552 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-77qvr" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.189724 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-t6vlm" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.187976 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.190109 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-bbq9v" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.189040 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.177186 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.177831 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.178260 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.178561 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.178677 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.181492 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.182034 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.182125 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.184427 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.184767 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.184979 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.185076 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.185125 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.185129 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.185301 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.176952 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.195423 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.195602 4861 reflector.go:368] Caches populated for *v1.Secret 
from object-"openshift-apiserver"/"encryption-config-1" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.195742 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.195887 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.196452 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.196707 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.196811 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.196954 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.197056 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.201039 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-rc9jz"] Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.201507 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-f9q2n"] Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.201781 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-xxf9s"] Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.202076 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-xxf9s" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.202684 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.202912 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/downloads-7954f5f757-rc9jz" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.204594 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.204768 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.204886 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.205019 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.205158 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.205310 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.205441 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.205917 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.206622 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-f9q2n" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.225140 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.228198 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6f825716-7012-4f64-925d-fdf69a3b8b28-config\") pod \"route-controller-manager-6576b87f9c-gjvs9\" (UID: \"6f825716-7012-4f64-925d-fdf69a3b8b28\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gjvs9" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.228267 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xgnzn\" (UniqueName: \"kubernetes.io/projected/cd587cd0-9026-4456-be22-b3ad36ed845f-kube-api-access-xgnzn\") pod \"console-operator-58897d9998-l9srr\" (UID: \"cd587cd0-9026-4456-be22-b3ad36ed845f\") " pod="openshift-console-operator/console-operator-58897d9998-l9srr" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.228315 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/db18a7f3-e40f-4f7c-af49-4920109fc80a-config\") pod \"kube-controller-manager-operator-78b949d7b-htfhq\" (UID: \"db18a7f3-e40f-4f7c-af49-4920109fc80a\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-htfhq" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.228352 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/ecce72ba-8490-4169-b5ef-df628ee12cdb-config\") pod \"machine-approver-56656f9798-8gdwj\" (UID: \"ecce72ba-8490-4169-b5ef-df628ee12cdb\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-8gdwj" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.228377 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/db18a7f3-e40f-4f7c-af49-4920109fc80a-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-htfhq\" (UID: \"db18a7f3-e40f-4f7c-af49-4920109fc80a\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-htfhq" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.228409 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/ecce72ba-8490-4169-b5ef-df628ee12cdb-auth-proxy-config\") pod \"machine-approver-56656f9798-8gdwj\" (UID: \"ecce72ba-8490-4169-b5ef-df628ee12cdb\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-8gdwj" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.228435 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cd587cd0-9026-4456-be22-b3ad36ed845f-config\") pod \"console-operator-58897d9998-l9srr\" (UID: \"cd587cd0-9026-4456-be22-b3ad36ed845f\") " pod="openshift-console-operator/console-operator-58897d9998-l9srr" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.228478 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cd587cd0-9026-4456-be22-b3ad36ed845f-serving-cert\") pod \"console-operator-58897d9998-l9srr\" (UID: \"cd587cd0-9026-4456-be22-b3ad36ed845f\") " pod="openshift-console-operator/console-operator-58897d9998-l9srr" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.228527 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jm72s\" (UniqueName: \"kubernetes.io/projected/ecce72ba-8490-4169-b5ef-df628ee12cdb-kube-api-access-jm72s\") pod \"machine-approver-56656f9798-8gdwj\" (UID: \"ecce72ba-8490-4169-b5ef-df628ee12cdb\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-8gdwj" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.228551 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/db18a7f3-e40f-4f7c-af49-4920109fc80a-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-htfhq\" (UID: \"db18a7f3-e40f-4f7c-af49-4920109fc80a\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-htfhq" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.228593 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6f825716-7012-4f64-925d-fdf69a3b8b28-serving-cert\") pod \"route-controller-manager-6576b87f9c-gjvs9\" (UID: \"6f825716-7012-4f64-925d-fdf69a3b8b28\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gjvs9" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.228665 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started 
for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/ecce72ba-8490-4169-b5ef-df628ee12cdb-machine-approver-tls\") pod \"machine-approver-56656f9798-8gdwj\" (UID: \"ecce72ba-8490-4169-b5ef-df628ee12cdb\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-8gdwj" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.228688 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6f825716-7012-4f64-925d-fdf69a3b8b28-client-ca\") pod \"route-controller-manager-6576b87f9c-gjvs9\" (UID: \"6f825716-7012-4f64-925d-fdf69a3b8b28\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gjvs9" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.228764 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qkq6j\" (UniqueName: \"kubernetes.io/projected/6f825716-7012-4f64-925d-fdf69a3b8b28-kube-api-access-qkq6j\") pod \"route-controller-manager-6576b87f9c-gjvs9\" (UID: \"6f825716-7012-4f64-925d-fdf69a3b8b28\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gjvs9" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.228788 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/cd587cd0-9026-4456-be22-b3ad36ed845f-trusted-ca\") pod \"console-operator-58897d9998-l9srr\" (UID: \"cd587cd0-9026-4456-be22-b3ad36ed845f\") " pod="openshift-console-operator/console-operator-58897d9998-l9srr" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.244140 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-584sd"] Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.245676 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.260878 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-584sd" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.261464 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.261628 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.262086 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.262432 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.263213 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.263570 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.264576 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.264864 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.264976 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.265316 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-t47dt"] Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.266348 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-t47dt" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.266382 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.267864 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.269324 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.278487 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.279134 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.279183 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.279333 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.279423 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.279432 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.283301 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-c7d78"] Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.283812 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-xt2zh"] Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.284161 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-xt2zh" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.284265 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-c7d78" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.284882 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.285436 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rjkcl"] Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.285921 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rjkcl" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.288538 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29324970-q4nf6"] Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.289127 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29324970-q4nf6" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.289547 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-lzfdk"] Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.290265 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-lzfdk" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.297615 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.297645 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.297926 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-zpskq"] Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.298553 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-zpskq" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.298672 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-s84lp"] Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.299287 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-s84lp" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.302282 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-k46fg"] Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.303120 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-jkg6x"] Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.303790 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-6lkw8"] Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.304300 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-6lkw8" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.304612 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-k46fg" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.306191 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-bj6mh"] Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.306790 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-jkg6x" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.307074 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-bj6mh" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.309714 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-46jp8"] Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.311122 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-fcq84"] Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.311149 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-46jp8" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.313108 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-fcq84" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.313380 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-nzdn9"] Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.314327 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-nzdn9" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.315528 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.321745 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-5nfml"] Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.322822 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-fv562"] Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.322992 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-canary/ingress-canary-5nfml" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.323779 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-gjvs9"] Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.331696 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.331997 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-c7d78"] Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.332745 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jm72s\" (UniqueName: \"kubernetes.io/projected/ecce72ba-8490-4169-b5ef-df628ee12cdb-kube-api-access-jm72s\") pod \"machine-approver-56656f9798-8gdwj\" (UID: \"ecce72ba-8490-4169-b5ef-df628ee12cdb\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-8gdwj" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.332838 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/db18a7f3-e40f-4f7c-af49-4920109fc80a-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-htfhq\" (UID: \"db18a7f3-e40f-4f7c-af49-4920109fc80a\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-htfhq" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.332915 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6f825716-7012-4f64-925d-fdf69a3b8b28-serving-cert\") pod \"route-controller-manager-6576b87f9c-gjvs9\" (UID: \"6f825716-7012-4f64-925d-fdf69a3b8b28\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gjvs9" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.332961 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/ecce72ba-8490-4169-b5ef-df628ee12cdb-machine-approver-tls\") pod \"machine-approver-56656f9798-8gdwj\" (UID: \"ecce72ba-8490-4169-b5ef-df628ee12cdb\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-8gdwj" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.332981 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6f825716-7012-4f64-925d-fdf69a3b8b28-client-ca\") pod \"route-controller-manager-6576b87f9c-gjvs9\" (UID: \"6f825716-7012-4f64-925d-fdf69a3b8b28\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gjvs9" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.333016 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qkq6j\" (UniqueName: \"kubernetes.io/projected/6f825716-7012-4f64-925d-fdf69a3b8b28-kube-api-access-qkq6j\") pod \"route-controller-manager-6576b87f9c-gjvs9\" (UID: \"6f825716-7012-4f64-925d-fdf69a3b8b28\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gjvs9" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.333034 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/cd587cd0-9026-4456-be22-b3ad36ed845f-trusted-ca\") 
pod \"console-operator-58897d9998-l9srr\" (UID: \"cd587cd0-9026-4456-be22-b3ad36ed845f\") " pod="openshift-console-operator/console-operator-58897d9998-l9srr" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.333176 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xgnzn\" (UniqueName: \"kubernetes.io/projected/cd587cd0-9026-4456-be22-b3ad36ed845f-kube-api-access-xgnzn\") pod \"console-operator-58897d9998-l9srr\" (UID: \"cd587cd0-9026-4456-be22-b3ad36ed845f\") " pod="openshift-console-operator/console-operator-58897d9998-l9srr" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.333219 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6f825716-7012-4f64-925d-fdf69a3b8b28-config\") pod \"route-controller-manager-6576b87f9c-gjvs9\" (UID: \"6f825716-7012-4f64-925d-fdf69a3b8b28\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gjvs9" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.333265 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/db18a7f3-e40f-4f7c-af49-4920109fc80a-config\") pod \"kube-controller-manager-operator-78b949d7b-htfhq\" (UID: \"db18a7f3-e40f-4f7c-af49-4920109fc80a\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-htfhq" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.333291 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/db18a7f3-e40f-4f7c-af49-4920109fc80a-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-htfhq\" (UID: \"db18a7f3-e40f-4f7c-af49-4920109fc80a\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-htfhq" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.333541 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ecce72ba-8490-4169-b5ef-df628ee12cdb-config\") pod \"machine-approver-56656f9798-8gdwj\" (UID: \"ecce72ba-8490-4169-b5ef-df628ee12cdb\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-8gdwj" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.333567 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/ecce72ba-8490-4169-b5ef-df628ee12cdb-auth-proxy-config\") pod \"machine-approver-56656f9798-8gdwj\" (UID: \"ecce72ba-8490-4169-b5ef-df628ee12cdb\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-8gdwj" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.333587 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cd587cd0-9026-4456-be22-b3ad36ed845f-config\") pod \"console-operator-58897d9998-l9srr\" (UID: \"cd587cd0-9026-4456-be22-b3ad36ed845f\") " pod="openshift-console-operator/console-operator-58897d9998-l9srr" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.333612 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cd587cd0-9026-4456-be22-b3ad36ed845f-serving-cert\") pod \"console-operator-58897d9998-l9srr\" (UID: \"cd587cd0-9026-4456-be22-b3ad36ed845f\") " 
pod="openshift-console-operator/console-operator-58897d9998-l9srr" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.341140 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/cd587cd0-9026-4456-be22-b3ad36ed845f-trusted-ca\") pod \"console-operator-58897d9998-l9srr\" (UID: \"cd587cd0-9026-4456-be22-b3ad36ed845f\") " pod="openshift-console-operator/console-operator-58897d9998-l9srr" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.344548 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-f9q2n"] Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.345618 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6f825716-7012-4f64-925d-fdf69a3b8b28-client-ca\") pod \"route-controller-manager-6576b87f9c-gjvs9\" (UID: \"6f825716-7012-4f64-925d-fdf69a3b8b28\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gjvs9" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.346098 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6f825716-7012-4f64-925d-fdf69a3b8b28-serving-cert\") pod \"route-controller-manager-6576b87f9c-gjvs9\" (UID: \"6f825716-7012-4f64-925d-fdf69a3b8b28\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gjvs9" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.346697 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ecce72ba-8490-4169-b5ef-df628ee12cdb-config\") pod \"machine-approver-56656f9798-8gdwj\" (UID: \"ecce72ba-8490-4169-b5ef-df628ee12cdb\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-8gdwj" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.347218 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/db18a7f3-e40f-4f7c-af49-4920109fc80a-config\") pod \"kube-controller-manager-operator-78b949d7b-htfhq\" (UID: \"db18a7f3-e40f-4f7c-af49-4920109fc80a\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-htfhq" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.347854 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cd587cd0-9026-4456-be22-b3ad36ed845f-serving-cert\") pod \"console-operator-58897d9998-l9srr\" (UID: \"cd587cd0-9026-4456-be22-b3ad36ed845f\") " pod="openshift-console-operator/console-operator-58897d9998-l9srr" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.349565 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-4ggnn"] Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.349607 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-5p9tt"] Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.349625 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-bbq9v"] Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.350561 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/cd587cd0-9026-4456-be22-b3ad36ed845f-config\") pod \"console-operator-58897d9998-l9srr\" (UID: \"cd587cd0-9026-4456-be22-b3ad36ed845f\") " pod="openshift-console-operator/console-operator-58897d9998-l9srr" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.355736 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.355926 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/ecce72ba-8490-4169-b5ef-df628ee12cdb-machine-approver-tls\") pod \"machine-approver-56656f9798-8gdwj\" (UID: \"ecce72ba-8490-4169-b5ef-df628ee12cdb\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-8gdwj" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.357407 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6f825716-7012-4f64-925d-fdf69a3b8b28-config\") pod \"route-controller-manager-6576b87f9c-gjvs9\" (UID: \"6f825716-7012-4f64-925d-fdf69a3b8b28\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gjvs9" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.360539 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/db18a7f3-e40f-4f7c-af49-4920109fc80a-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-htfhq\" (UID: \"db18a7f3-e40f-4f7c-af49-4920109fc80a\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-htfhq" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.360662 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-r4wck"] Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.363438 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-htfhq"] Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.366331 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-654v6"] Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.367303 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.375107 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-xxf9s"] Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.375159 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-v6tz9"] Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.376808 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-rkmmm"] Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.378555 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-l9srr"] Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.379467 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-4znl7"] Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.381325 4861 kubelet.go:2428] "SyncLoop UPDATE" 
source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-xt2zh"] Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.384894 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29324970-q4nf6"] Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.386409 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.386569 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-kgpdn"] Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.387743 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-kgpdn" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.389538 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-q7c5v"] Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.389998 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-q7c5v" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.392846 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-8tkv7"] Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.394180 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-zjlzh"] Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.396619 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-t47dt"] Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.400183 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rjkcl"] Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.402223 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-584sd"] Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.403853 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-s84lp"] Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.405844 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.406483 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-t6vlm"] Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.411510 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-8kth8"] Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.411579 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-rc9jz"] Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.412556 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-xv8s4"] Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.413854 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-lzfdk"] Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 
13:32:55.414979 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-6lkw8"] Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.416295 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-bj6mh"] Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.418755 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-kgpdn"] Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.418844 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-fcq84"] Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.420101 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-nzdn9"] Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.422908 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-zpskq"] Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.424174 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-k46fg"] Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.425873 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-jkg6x"] Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.426280 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.427276 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-rg4gb"] Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.428571 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-5nfml"] Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.429969 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-6gqgm"] Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.430821 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/dns-default-6gqgm" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.431342 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-6gqgm"] Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.453804 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.466495 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.486563 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.506485 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.526129 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.546895 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.576905 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.586485 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.606197 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.646479 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.650369 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/ecce72ba-8490-4169-b5ef-df628ee12cdb-auth-proxy-config\") pod \"machine-approver-56656f9798-8gdwj\" (UID: \"ecce72ba-8490-4169-b5ef-df628ee12cdb\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-8gdwj" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.666049 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.680183 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cft42" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.680437 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.680199 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.680184 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.685774 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.705858 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.726673 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.746092 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.766306 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.785860 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.805970 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.826450 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.845619 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.866155 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.886391 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.906971 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.925979 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.946440 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.967057 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Oct 03 13:32:55 crc kubenswrapper[4861]: I1003 13:32:55.986198 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Oct 03 13:32:56 crc kubenswrapper[4861]: I1003 13:32:56.007356 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Oct 03 13:32:56 crc 
kubenswrapper[4861]: I1003 13:32:56.026585 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Oct 03 13:32:56 crc kubenswrapper[4861]: I1003 13:32:56.046874 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Oct 03 13:32:56 crc kubenswrapper[4861]: I1003 13:32:56.067021 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Oct 03 13:32:56 crc kubenswrapper[4861]: I1003 13:32:56.086993 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Oct 03 13:32:56 crc kubenswrapper[4861]: I1003 13:32:56.106822 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Oct 03 13:32:56 crc kubenswrapper[4861]: I1003 13:32:56.125960 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Oct 03 13:32:56 crc kubenswrapper[4861]: I1003 13:32:56.146844 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Oct 03 13:32:56 crc kubenswrapper[4861]: I1003 13:32:56.166393 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Oct 03 13:32:56 crc kubenswrapper[4861]: I1003 13:32:56.187378 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 03 13:32:56 crc kubenswrapper[4861]: I1003 13:32:56.206358 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Oct 03 13:32:56 crc kubenswrapper[4861]: I1003 13:32:56.226996 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Oct 03 13:32:56 crc kubenswrapper[4861]: I1003 13:32:56.247311 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 03 13:32:56 crc kubenswrapper[4861]: I1003 13:32:56.267429 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Oct 03 13:32:56 crc kubenswrapper[4861]: I1003 13:32:56.286093 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Oct 03 13:32:56 crc kubenswrapper[4861]: I1003 13:32:56.304910 4861 request.go:700] Waited for 1.005426356s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-etcd-operator/secrets?fieldSelector=metadata.name%3Detcd-operator-serving-cert&limit=500&resourceVersion=0 Oct 03 13:32:56 crc kubenswrapper[4861]: I1003 13:32:56.306968 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Oct 03 13:32:56 crc kubenswrapper[4861]: I1003 13:32:56.326419 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Oct 03 13:32:56 crc kubenswrapper[4861]: I1003 13:32:56.345796 4861 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-etcd-operator"/"etcd-client" Oct 03 13:32:56 crc kubenswrapper[4861]: I1003 13:32:56.365863 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Oct 03 13:32:56 crc kubenswrapper[4861]: I1003 13:32:56.385835 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Oct 03 13:32:56 crc kubenswrapper[4861]: I1003 13:32:56.405898 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Oct 03 13:32:56 crc kubenswrapper[4861]: I1003 13:32:56.427424 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Oct 03 13:32:56 crc kubenswrapper[4861]: I1003 13:32:56.447079 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Oct 03 13:32:56 crc kubenswrapper[4861]: I1003 13:32:56.466082 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Oct 03 13:32:56 crc kubenswrapper[4861]: I1003 13:32:56.487035 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Oct 03 13:32:56 crc kubenswrapper[4861]: I1003 13:32:56.506811 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Oct 03 13:32:56 crc kubenswrapper[4861]: I1003 13:32:56.527029 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Oct 03 13:32:56 crc kubenswrapper[4861]: I1003 13:32:56.547470 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Oct 03 13:32:56 crc kubenswrapper[4861]: I1003 13:32:56.567171 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Oct 03 13:32:56 crc kubenswrapper[4861]: I1003 13:32:56.586982 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Oct 03 13:32:56 crc kubenswrapper[4861]: I1003 13:32:56.612297 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Oct 03 13:32:56 crc kubenswrapper[4861]: I1003 13:32:56.627132 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Oct 03 13:32:56 crc kubenswrapper[4861]: I1003 13:32:56.646324 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Oct 03 13:32:56 crc kubenswrapper[4861]: I1003 13:32:56.666935 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Oct 03 13:32:56 crc kubenswrapper[4861]: I1003 13:32:56.686752 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Oct 03 13:32:56 crc kubenswrapper[4861]: I1003 13:32:56.706610 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Oct 03 13:32:56 crc kubenswrapper[4861]: I1003 13:32:56.726345 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" 
Oct 03 13:32:56 crc kubenswrapper[4861]: I1003 13:32:56.746714 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Oct 03 13:32:56 crc kubenswrapper[4861]: I1003 13:32:56.766089 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Oct 03 13:32:56 crc kubenswrapper[4861]: I1003 13:32:56.786143 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Oct 03 13:32:56 crc kubenswrapper[4861]: I1003 13:32:56.806880 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Oct 03 13:32:56 crc kubenswrapper[4861]: I1003 13:32:56.826913 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Oct 03 13:32:56 crc kubenswrapper[4861]: I1003 13:32:56.846149 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Oct 03 13:32:56 crc kubenswrapper[4861]: I1003 13:32:56.866440 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Oct 03 13:32:56 crc kubenswrapper[4861]: I1003 13:32:56.886725 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Oct 03 13:32:56 crc kubenswrapper[4861]: I1003 13:32:56.907285 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Oct 03 13:32:56 crc kubenswrapper[4861]: I1003 13:32:56.926612 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Oct 03 13:32:56 crc kubenswrapper[4861]: I1003 13:32:56.947029 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Oct 03 13:32:56 crc kubenswrapper[4861]: I1003 13:32:56.966621 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.006990 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.026201 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.047719 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.066353 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.105587 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xgnzn\" (UniqueName: \"kubernetes.io/projected/cd587cd0-9026-4456-be22-b3ad36ed845f-kube-api-access-xgnzn\") pod \"console-operator-58897d9998-l9srr\" (UID: \"cd587cd0-9026-4456-be22-b3ad36ed845f\") " pod="openshift-console-operator/console-operator-58897d9998-l9srr" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.120698 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jm72s\" (UniqueName: 
\"kubernetes.io/projected/ecce72ba-8490-4169-b5ef-df628ee12cdb-kube-api-access-jm72s\") pod \"machine-approver-56656f9798-8gdwj\" (UID: \"ecce72ba-8490-4169-b5ef-df628ee12cdb\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-8gdwj" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.141144 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qkq6j\" (UniqueName: \"kubernetes.io/projected/6f825716-7012-4f64-925d-fdf69a3b8b28-kube-api-access-qkq6j\") pod \"route-controller-manager-6576b87f9c-gjvs9\" (UID: \"6f825716-7012-4f64-925d-fdf69a3b8b28\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gjvs9" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.165690 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/db18a7f3-e40f-4f7c-af49-4920109fc80a-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-htfhq\" (UID: \"db18a7f3-e40f-4f7c-af49-4920109fc80a\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-htfhq" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.165918 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.186733 4861 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.191141 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-l9srr" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.207415 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.211665 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gjvs9" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.226206 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.245726 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-8gdwj" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.246752 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.255455 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-htfhq" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.268091 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.288721 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.307842 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.325215 4861 request.go:700] Waited for 1.894024757s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-dns/secrets?fieldSelector=metadata.name%3Ddns-default-metrics-tls&limit=500&resourceVersion=0 Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.328005 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.365420 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/2ddecabf-87aa-4eda-93a5-eee5c61d3b91-registry-certificates\") pod \"image-registry-697d97f7c8-t6vlm\" (UID: \"2ddecabf-87aa-4eda-93a5-eee5c61d3b91\") " pod="openshift-image-registry/image-registry-697d97f7c8-t6vlm" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.365462 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dzhhp\" (UniqueName: \"kubernetes.io/projected/51bf4935-7bd2-4059-9869-60fc8db46d82-kube-api-access-dzhhp\") pod \"dns-operator-744455d44c-zjlzh\" (UID: \"51bf4935-7bd2-4059-9869-60fc8db46d82\") " pod="openshift-dns-operator/dns-operator-744455d44c-zjlzh" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.365487 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/fdb216bf-a9cf-4f81-95ee-0424fc4bdc89-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-xv8s4\" (UID: \"fdb216bf-a9cf-4f81-95ee-0424fc4bdc89\") " pod="openshift-authentication/oauth-openshift-558db77b4-xv8s4" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.365514 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c805746f-1e3e-488f-abda-4584a3028187-serving-cert\") pod \"authentication-operator-69f744f599-fv562\" (UID: \"c805746f-1e3e-488f-abda-4584a3028187\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-fv562" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.365534 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/802caeaa-43a4-4cca-b946-5b561df185a8-audit\") pod \"apiserver-76f77b778f-rkmmm\" (UID: \"802caeaa-43a4-4cca-b946-5b561df185a8\") " pod="openshift-apiserver/apiserver-76f77b778f-rkmmm" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.365573 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hjn98\" (UniqueName: 
\"kubernetes.io/projected/2ddecabf-87aa-4eda-93a5-eee5c61d3b91-kube-api-access-hjn98\") pod \"image-registry-697d97f7c8-t6vlm\" (UID: \"2ddecabf-87aa-4eda-93a5-eee5c61d3b91\") " pod="openshift-image-registry/image-registry-697d97f7c8-t6vlm" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.365605 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/0118936f-47b4-4d58-956c-572d58803b3f-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-4ggnn\" (UID: \"0118936f-47b4-4d58-956c-572d58803b3f\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-4ggnn" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.365627 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lhtgw\" (UniqueName: \"kubernetes.io/projected/3bf6ab88-b524-4a66-9fff-f873646d11d9-kube-api-access-lhtgw\") pod \"ingress-operator-5b745b69d9-bbq9v\" (UID: \"3bf6ab88-b524-4a66-9fff-f873646d11d9\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-bbq9v" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.365646 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n8cpv\" (UniqueName: \"kubernetes.io/projected/a6a84618-0674-4410-ab88-6896066b6077-kube-api-access-n8cpv\") pod \"controller-manager-879f6c89f-654v6\" (UID: \"a6a84618-0674-4410-ab88-6896066b6077\") " pod="openshift-controller-manager/controller-manager-879f6c89f-654v6" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.365673 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/35c87388-97eb-44ff-91d6-6e9b9cfaa6a1-service-ca\") pod \"console-f9d7485db-v6tz9\" (UID: \"35c87388-97eb-44ff-91d6-6e9b9cfaa6a1\") " pod="openshift-console/console-f9d7485db-v6tz9" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.365691 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/802caeaa-43a4-4cca-b946-5b561df185a8-serving-cert\") pod \"apiserver-76f77b778f-rkmmm\" (UID: \"802caeaa-43a4-4cca-b946-5b561df185a8\") " pod="openshift-apiserver/apiserver-76f77b778f-rkmmm" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.365710 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/fdb216bf-a9cf-4f81-95ee-0424fc4bdc89-audit-policies\") pod \"oauth-openshift-558db77b4-xv8s4\" (UID: \"fdb216bf-a9cf-4f81-95ee-0424fc4bdc89\") " pod="openshift-authentication/oauth-openshift-558db77b4-xv8s4" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.365728 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/a6a84618-0674-4410-ab88-6896066b6077-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-654v6\" (UID: \"a6a84618-0674-4410-ab88-6896066b6077\") " pod="openshift-controller-manager/controller-manager-879f6c89f-654v6" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.365750 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: 
\"kubernetes.io/configmap/35c87388-97eb-44ff-91d6-6e9b9cfaa6a1-console-config\") pod \"console-f9d7485db-v6tz9\" (UID: \"35c87388-97eb-44ff-91d6-6e9b9cfaa6a1\") " pod="openshift-console/console-f9d7485db-v6tz9" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.365769 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/fdb216bf-a9cf-4f81-95ee-0424fc4bdc89-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-xv8s4\" (UID: \"fdb216bf-a9cf-4f81-95ee-0424fc4bdc89\") " pod="openshift-authentication/oauth-openshift-558db77b4-xv8s4" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.365789 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/41c5f641-6c5c-4b3b-89d7-2a265cab36ca-audit-policies\") pod \"apiserver-7bbb656c7d-8kth8\" (UID: \"41c5f641-6c5c-4b3b-89d7-2a265cab36ca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8kth8" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.365807 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/802caeaa-43a4-4cca-b946-5b561df185a8-config\") pod \"apiserver-76f77b778f-rkmmm\" (UID: \"802caeaa-43a4-4cca-b946-5b561df185a8\") " pod="openshift-apiserver/apiserver-76f77b778f-rkmmm" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.365837 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/802caeaa-43a4-4cca-b946-5b561df185a8-etcd-serving-ca\") pod \"apiserver-76f77b778f-rkmmm\" (UID: \"802caeaa-43a4-4cca-b946-5b561df185a8\") " pod="openshift-apiserver/apiserver-76f77b778f-rkmmm" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.365857 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/fdb216bf-a9cf-4f81-95ee-0424fc4bdc89-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-xv8s4\" (UID: \"fdb216bf-a9cf-4f81-95ee-0424fc4bdc89\") " pod="openshift-authentication/oauth-openshift-558db77b4-xv8s4" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.365880 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/35c87388-97eb-44ff-91d6-6e9b9cfaa6a1-oauth-serving-cert\") pod \"console-f9d7485db-v6tz9\" (UID: \"35c87388-97eb-44ff-91d6-6e9b9cfaa6a1\") " pod="openshift-console/console-f9d7485db-v6tz9" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.365898 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/fdb216bf-a9cf-4f81-95ee-0424fc4bdc89-audit-dir\") pod \"oauth-openshift-558db77b4-xv8s4\" (UID: \"fdb216bf-a9cf-4f81-95ee-0424fc4bdc89\") " pod="openshift-authentication/oauth-openshift-558db77b4-xv8s4" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.365919 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a190f2de-91d0-40a2-b1cc-f9c191d56ce5-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-77qvr\" (UID: 
\"a190f2de-91d0-40a2-b1cc-f9c191d56ce5\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-77qvr" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.365940 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/3bf6ab88-b524-4a66-9fff-f873646d11d9-metrics-tls\") pod \"ingress-operator-5b745b69d9-bbq9v\" (UID: \"3bf6ab88-b524-4a66-9fff-f873646d11d9\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-bbq9v" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.365960 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/a190f2de-91d0-40a2-b1cc-f9c191d56ce5-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-77qvr\" (UID: \"a190f2de-91d0-40a2-b1cc-f9c191d56ce5\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-77qvr" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.365982 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/a190f2de-91d0-40a2-b1cc-f9c191d56ce5-service-ca\") pod \"cluster-version-operator-5c965bbfc6-77qvr\" (UID: \"a190f2de-91d0-40a2-b1cc-f9c191d56ce5\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-77qvr" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.366005 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/51bf4935-7bd2-4059-9869-60fc8db46d82-metrics-tls\") pod \"dns-operator-744455d44c-zjlzh\" (UID: \"51bf4935-7bd2-4059-9869-60fc8db46d82\") " pod="openshift-dns-operator/dns-operator-744455d44c-zjlzh" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.366027 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/fdb216bf-a9cf-4f81-95ee-0424fc4bdc89-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-xv8s4\" (UID: \"fdb216bf-a9cf-4f81-95ee-0424fc4bdc89\") " pod="openshift-authentication/oauth-openshift-558db77b4-xv8s4" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.366048 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a6a84618-0674-4410-ab88-6896066b6077-serving-cert\") pod \"controller-manager-879f6c89f-654v6\" (UID: \"a6a84618-0674-4410-ab88-6896066b6077\") " pod="openshift-controller-manager/controller-manager-879f6c89f-654v6" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.366070 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/a190f2de-91d0-40a2-b1cc-f9c191d56ce5-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-77qvr\" (UID: \"a190f2de-91d0-40a2-b1cc-f9c191d56ce5\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-77qvr" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.366092 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/802caeaa-43a4-4cca-b946-5b561df185a8-image-import-ca\") pod \"apiserver-76f77b778f-rkmmm\" (UID: 
\"802caeaa-43a4-4cca-b946-5b561df185a8\") " pod="openshift-apiserver/apiserver-76f77b778f-rkmmm" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.366112 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/802caeaa-43a4-4cca-b946-5b561df185a8-encryption-config\") pod \"apiserver-76f77b778f-rkmmm\" (UID: \"802caeaa-43a4-4cca-b946-5b561df185a8\") " pod="openshift-apiserver/apiserver-76f77b778f-rkmmm" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.366133 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/fdb216bf-a9cf-4f81-95ee-0424fc4bdc89-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-xv8s4\" (UID: \"fdb216bf-a9cf-4f81-95ee-0424fc4bdc89\") " pod="openshift-authentication/oauth-openshift-558db77b4-xv8s4" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.366156 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/fdb216bf-a9cf-4f81-95ee-0424fc4bdc89-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-xv8s4\" (UID: \"fdb216bf-a9cf-4f81-95ee-0424fc4bdc89\") " pod="openshift-authentication/oauth-openshift-558db77b4-xv8s4" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.366183 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m8qdq\" (UniqueName: \"kubernetes.io/projected/b06fff6c-bfa9-4b23-81d4-7cfc8ec52173-kube-api-access-m8qdq\") pod \"openshift-controller-manager-operator-756b6f6bc6-r4wck\" (UID: \"b06fff6c-bfa9-4b23-81d4-7cfc8ec52173\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-r4wck" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.366205 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/fdb216bf-a9cf-4f81-95ee-0424fc4bdc89-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-xv8s4\" (UID: \"fdb216bf-a9cf-4f81-95ee-0424fc4bdc89\") " pod="openshift-authentication/oauth-openshift-558db77b4-xv8s4" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.366248 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5525z\" (UniqueName: \"kubernetes.io/projected/802caeaa-43a4-4cca-b946-5b561df185a8-kube-api-access-5525z\") pod \"apiserver-76f77b778f-rkmmm\" (UID: \"802caeaa-43a4-4cca-b946-5b561df185a8\") " pod="openshift-apiserver/apiserver-76f77b778f-rkmmm" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.366272 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c805746f-1e3e-488f-abda-4584a3028187-config\") pod \"authentication-operator-69f744f599-fv562\" (UID: \"c805746f-1e3e-488f-abda-4584a3028187\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-fv562" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.366291 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/35c87388-97eb-44ff-91d6-6e9b9cfaa6a1-trusted-ca-bundle\") pod \"console-f9d7485db-v6tz9\" (UID: \"35c87388-97eb-44ff-91d6-6e9b9cfaa6a1\") " pod="openshift-console/console-f9d7485db-v6tz9" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.366313 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/35c87388-97eb-44ff-91d6-6e9b9cfaa6a1-console-oauth-config\") pod \"console-f9d7485db-v6tz9\" (UID: \"35c87388-97eb-44ff-91d6-6e9b9cfaa6a1\") " pod="openshift-console/console-f9d7485db-v6tz9" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.366333 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/41c5f641-6c5c-4b3b-89d7-2a265cab36ca-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-8kth8\" (UID: \"41c5f641-6c5c-4b3b-89d7-2a265cab36ca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8kth8" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.366354 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/802caeaa-43a4-4cca-b946-5b561df185a8-node-pullsecrets\") pod \"apiserver-76f77b778f-rkmmm\" (UID: \"802caeaa-43a4-4cca-b946-5b561df185a8\") " pod="openshift-apiserver/apiserver-76f77b778f-rkmmm" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.366378 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.366391 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f279daa8-7a4e-405d-b7af-499a2179001f-serving-cert\") pod \"openshift-config-operator-7777fb866f-4znl7\" (UID: \"f279daa8-7a4e-405d-b7af-499a2179001f\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-4znl7" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.366426 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-t6vlm\" (UID: \"2ddecabf-87aa-4eda-93a5-eee5c61d3b91\") " pod="openshift-image-registry/image-registry-697d97f7c8-t6vlm" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.366449 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2ddecabf-87aa-4eda-93a5-eee5c61d3b91-trusted-ca\") pod \"image-registry-697d97f7c8-t6vlm\" (UID: \"2ddecabf-87aa-4eda-93a5-eee5c61d3b91\") " pod="openshift-image-registry/image-registry-697d97f7c8-t6vlm" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.366471 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/41c5f641-6c5c-4b3b-89d7-2a265cab36ca-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-8kth8\" (UID: \"41c5f641-6c5c-4b3b-89d7-2a265cab36ca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8kth8" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.366493 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"audit-dir\" (UniqueName: \"kubernetes.io/host-path/802caeaa-43a4-4cca-b946-5b561df185a8-audit-dir\") pod \"apiserver-76f77b778f-rkmmm\" (UID: \"802caeaa-43a4-4cca-b946-5b561df185a8\") " pod="openshift-apiserver/apiserver-76f77b778f-rkmmm" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.366515 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a6a84618-0674-4410-ab88-6896066b6077-client-ca\") pod \"controller-manager-879f6c89f-654v6\" (UID: \"a6a84618-0674-4410-ab88-6896066b6077\") " pod="openshift-controller-manager/controller-manager-879f6c89f-654v6" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.366537 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t7hbj\" (UniqueName: \"kubernetes.io/projected/04154cd6-a67c-42d3-bbb0-951c4986390d-kube-api-access-t7hbj\") pod \"machine-api-operator-5694c8668f-rg4gb\" (UID: \"04154cd6-a67c-42d3-bbb0-951c4986390d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-rg4gb" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.366558 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/35c87388-97eb-44ff-91d6-6e9b9cfaa6a1-console-serving-cert\") pod \"console-f9d7485db-v6tz9\" (UID: \"35c87388-97eb-44ff-91d6-6e9b9cfaa6a1\") " pod="openshift-console/console-f9d7485db-v6tz9" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.366581 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/0118936f-47b4-4d58-956c-572d58803b3f-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-4ggnn\" (UID: \"0118936f-47b4-4d58-956c-572d58803b3f\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-4ggnn" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.366606 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4z8ps\" (UniqueName: \"kubernetes.io/projected/0118936f-47b4-4d58-956c-572d58803b3f-kube-api-access-4z8ps\") pod \"cluster-image-registry-operator-dc59b4c8b-4ggnn\" (UID: \"0118936f-47b4-4d58-956c-572d58803b3f\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-4ggnn" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.366627 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/41c5f641-6c5c-4b3b-89d7-2a265cab36ca-etcd-client\") pod \"apiserver-7bbb656c7d-8kth8\" (UID: \"41c5f641-6c5c-4b3b-89d7-2a265cab36ca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8kth8" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.366648 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x4fcl\" (UniqueName: \"kubernetes.io/projected/41c5f641-6c5c-4b3b-89d7-2a265cab36ca-kube-api-access-x4fcl\") pod \"apiserver-7bbb656c7d-8kth8\" (UID: \"41c5f641-6c5c-4b3b-89d7-2a265cab36ca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8kth8" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.366670 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: 
\"kubernetes.io/secret/fdb216bf-a9cf-4f81-95ee-0424fc4bdc89-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-xv8s4\" (UID: \"fdb216bf-a9cf-4f81-95ee-0424fc4bdc89\") " pod="openshift-authentication/oauth-openshift-558db77b4-xv8s4" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.366691 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/3d27eb32-1a56-4d8c-9504-4b7fa8261df6-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-8tkv7\" (UID: \"3d27eb32-1a56-4d8c-9504-4b7fa8261df6\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-8tkv7" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.366710 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b06fff6c-bfa9-4b23-81d4-7cfc8ec52173-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-r4wck\" (UID: \"b06fff6c-bfa9-4b23-81d4-7cfc8ec52173\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-r4wck" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.366729 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/2ddecabf-87aa-4eda-93a5-eee5c61d3b91-registry-tls\") pod \"image-registry-697d97f7c8-t6vlm\" (UID: \"2ddecabf-87aa-4eda-93a5-eee5c61d3b91\") " pod="openshift-image-registry/image-registry-697d97f7c8-t6vlm" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.366750 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b06fff6c-bfa9-4b23-81d4-7cfc8ec52173-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-r4wck\" (UID: \"b06fff6c-bfa9-4b23-81d4-7cfc8ec52173\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-r4wck" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.366770 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/04154cd6-a67c-42d3-bbb0-951c4986390d-config\") pod \"machine-api-operator-5694c8668f-rg4gb\" (UID: \"04154cd6-a67c-42d3-bbb0-951c4986390d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-rg4gb" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.366794 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c805746f-1e3e-488f-abda-4584a3028187-service-ca-bundle\") pod \"authentication-operator-69f744f599-fv562\" (UID: \"c805746f-1e3e-488f-abda-4584a3028187\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-fv562" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.366814 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/41c5f641-6c5c-4b3b-89d7-2a265cab36ca-serving-cert\") pod \"apiserver-7bbb656c7d-8kth8\" (UID: \"41c5f641-6c5c-4b3b-89d7-2a265cab36ca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8kth8" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.366843 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/0118936f-47b4-4d58-956c-572d58803b3f-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-4ggnn\" (UID: \"0118936f-47b4-4d58-956c-572d58803b3f\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-4ggnn" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.366864 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/41c5f641-6c5c-4b3b-89d7-2a265cab36ca-encryption-config\") pod \"apiserver-7bbb656c7d-8kth8\" (UID: \"41c5f641-6c5c-4b3b-89d7-2a265cab36ca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8kth8" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.366892 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/2ddecabf-87aa-4eda-93a5-eee5c61d3b91-installation-pull-secrets\") pod \"image-registry-697d97f7c8-t6vlm\" (UID: \"2ddecabf-87aa-4eda-93a5-eee5c61d3b91\") " pod="openshift-image-registry/image-registry-697d97f7c8-t6vlm" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.366914 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/2ddecabf-87aa-4eda-93a5-eee5c61d3b91-ca-trust-extracted\") pod \"image-registry-697d97f7c8-t6vlm\" (UID: \"2ddecabf-87aa-4eda-93a5-eee5c61d3b91\") " pod="openshift-image-registry/image-registry-697d97f7c8-t6vlm" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.366933 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-49pjb\" (UniqueName: \"kubernetes.io/projected/35c87388-97eb-44ff-91d6-6e9b9cfaa6a1-kube-api-access-49pjb\") pod \"console-f9d7485db-v6tz9\" (UID: \"35c87388-97eb-44ff-91d6-6e9b9cfaa6a1\") " pod="openshift-console/console-f9d7485db-v6tz9" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.366953 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/41c5f641-6c5c-4b3b-89d7-2a265cab36ca-audit-dir\") pod \"apiserver-7bbb656c7d-8kth8\" (UID: \"41c5f641-6c5c-4b3b-89d7-2a265cab36ca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8kth8" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.366973 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/fdb216bf-a9cf-4f81-95ee-0424fc4bdc89-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-xv8s4\" (UID: \"fdb216bf-a9cf-4f81-95ee-0424fc4bdc89\") " pod="openshift-authentication/oauth-openshift-558db77b4-xv8s4" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.366995 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/fdb216bf-a9cf-4f81-95ee-0424fc4bdc89-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-xv8s4\" (UID: \"fdb216bf-a9cf-4f81-95ee-0424fc4bdc89\") " pod="openshift-authentication/oauth-openshift-558db77b4-xv8s4" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.367017 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-pdfw2\" (UniqueName: \"kubernetes.io/projected/3d27eb32-1a56-4d8c-9504-4b7fa8261df6-kube-api-access-pdfw2\") pod \"cluster-samples-operator-665b6dd947-8tkv7\" (UID: \"3d27eb32-1a56-4d8c-9504-4b7fa8261df6\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-8tkv7" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.367039 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/f279daa8-7a4e-405d-b7af-499a2179001f-available-featuregates\") pod \"openshift-config-operator-7777fb866f-4znl7\" (UID: \"f279daa8-7a4e-405d-b7af-499a2179001f\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-4znl7" Oct 03 13:32:57 crc kubenswrapper[4861]: E1003 13:32:57.367077 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 13:32:57.867038556 +0000 UTC m=+91.865023603 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-t6vlm" (UID: "2ddecabf-87aa-4eda-93a5-eee5c61d3b91") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.367106 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/2ddecabf-87aa-4eda-93a5-eee5c61d3b91-bound-sa-token\") pod \"image-registry-697d97f7c8-t6vlm\" (UID: \"2ddecabf-87aa-4eda-93a5-eee5c61d3b91\") " pod="openshift-image-registry/image-registry-697d97f7c8-t6vlm" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.367133 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jskrd\" (UniqueName: \"kubernetes.io/projected/c805746f-1e3e-488f-abda-4584a3028187-kube-api-access-jskrd\") pod \"authentication-operator-69f744f599-fv562\" (UID: \"c805746f-1e3e-488f-abda-4584a3028187\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-fv562" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.367149 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/802caeaa-43a4-4cca-b946-5b561df185a8-etcd-client\") pod \"apiserver-76f77b778f-rkmmm\" (UID: \"802caeaa-43a4-4cca-b946-5b561df185a8\") " pod="openshift-apiserver/apiserver-76f77b778f-rkmmm" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.367373 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cszvg\" (UniqueName: \"kubernetes.io/projected/a7c2774d-2d73-439d-94d7-6c184b05bf91-kube-api-access-cszvg\") pod \"openshift-apiserver-operator-796bbdcf4f-5p9tt\" (UID: \"a7c2774d-2d73-439d-94d7-6c184b05bf91\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-5p9tt" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.367417 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" 
(UniqueName: \"kubernetes.io/configmap/04154cd6-a67c-42d3-bbb0-951c4986390d-images\") pod \"machine-api-operator-5694c8668f-rg4gb\" (UID: \"04154cd6-a67c-42d3-bbb0-951c4986390d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-rg4gb" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.367441 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a190f2de-91d0-40a2-b1cc-f9c191d56ce5-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-77qvr\" (UID: \"a190f2de-91d0-40a2-b1cc-f9c191d56ce5\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-77qvr" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.367472 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c805746f-1e3e-488f-abda-4584a3028187-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-fv562\" (UID: \"c805746f-1e3e-488f-abda-4584a3028187\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-fv562" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.367498 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a7c2774d-2d73-439d-94d7-6c184b05bf91-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-5p9tt\" (UID: \"a7c2774d-2d73-439d-94d7-6c184b05bf91\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-5p9tt" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.367523 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/3bf6ab88-b524-4a66-9fff-f873646d11d9-bound-sa-token\") pod \"ingress-operator-5b745b69d9-bbq9v\" (UID: \"3bf6ab88-b524-4a66-9fff-f873646d11d9\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-bbq9v" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.367562 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/04154cd6-a67c-42d3-bbb0-951c4986390d-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-rg4gb\" (UID: \"04154cd6-a67c-42d3-bbb0-951c4986390d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-rg4gb" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.367585 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/802caeaa-43a4-4cca-b946-5b561df185a8-trusted-ca-bundle\") pod \"apiserver-76f77b778f-rkmmm\" (UID: \"802caeaa-43a4-4cca-b946-5b561df185a8\") " pod="openshift-apiserver/apiserver-76f77b778f-rkmmm" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.367611 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a6a84618-0674-4410-ab88-6896066b6077-config\") pod \"controller-manager-879f6c89f-654v6\" (UID: \"a6a84618-0674-4410-ab88-6896066b6077\") " pod="openshift-controller-manager/controller-manager-879f6c89f-654v6" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.367639 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/a7c2774d-2d73-439d-94d7-6c184b05bf91-config\") pod \"openshift-apiserver-operator-796bbdcf4f-5p9tt\" (UID: \"a7c2774d-2d73-439d-94d7-6c184b05bf91\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-5p9tt" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.367663 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/3bf6ab88-b524-4a66-9fff-f873646d11d9-trusted-ca\") pod \"ingress-operator-5b745b69d9-bbq9v\" (UID: \"3bf6ab88-b524-4a66-9fff-f873646d11d9\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-bbq9v" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.367685 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/fdb216bf-a9cf-4f81-95ee-0424fc4bdc89-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-xv8s4\" (UID: \"fdb216bf-a9cf-4f81-95ee-0424fc4bdc89\") " pod="openshift-authentication/oauth-openshift-558db77b4-xv8s4" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.367710 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gpnnd\" (UniqueName: \"kubernetes.io/projected/fdb216bf-a9cf-4f81-95ee-0424fc4bdc89-kube-api-access-gpnnd\") pod \"oauth-openshift-558db77b4-xv8s4\" (UID: \"fdb216bf-a9cf-4f81-95ee-0424fc4bdc89\") " pod="openshift-authentication/oauth-openshift-558db77b4-xv8s4" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.367735 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vn22z\" (UniqueName: \"kubernetes.io/projected/f279daa8-7a4e-405d-b7af-499a2179001f-kube-api-access-vn22z\") pod \"openshift-config-operator-7777fb866f-4znl7\" (UID: \"f279daa8-7a4e-405d-b7af-499a2179001f\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-4znl7" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.387245 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.393123 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-l9srr"] Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.412835 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.424966 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-gjvs9"] Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.425936 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Oct 03 13:32:57 crc kubenswrapper[4861]: W1003 13:32:57.437485 4861 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6f825716_7012_4f64_925d_fdf69a3b8b28.slice/crio-8892703c17b4be57ae35707c84eab3755fc33d1e950b8d5138c6ae8a78e45626 WatchSource:0}: Error finding container 8892703c17b4be57ae35707c84eab3755fc33d1e950b8d5138c6ae8a78e45626: Status 404 returned error can't find the container with id 8892703c17b4be57ae35707c84eab3755fc33d1e950b8d5138c6ae8a78e45626 Oct 03 
13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.446573 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.467149 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.468380 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.468548 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0c3c7089-d98e-4504-91c2-27851ed21d16-config-volume\") pod \"collect-profiles-29324970-q4nf6\" (UID: \"0c3c7089-d98e-4504-91c2-27851ed21d16\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324970-q4nf6" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.468581 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c805746f-1e3e-488f-abda-4584a3028187-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-fv562\" (UID: \"c805746f-1e3e-488f-abda-4584a3028187\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-fv562" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.468602 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a6a84618-0674-4410-ab88-6896066b6077-config\") pod \"controller-manager-879f6c89f-654v6\" (UID: \"a6a84618-0674-4410-ab88-6896066b6077\") " pod="openshift-controller-manager/controller-manager-879f6c89f-654v6" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.468628 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qzw98\" (UniqueName: \"kubernetes.io/projected/5b4e4f2b-6854-4b02-9e1a-79aeff76d109-kube-api-access-qzw98\") pod \"ingress-canary-5nfml\" (UID: \"5b4e4f2b-6854-4b02-9e1a-79aeff76d109\") " pod="openshift-ingress-canary/ingress-canary-5nfml" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.468650 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b9spb\" (UniqueName: \"kubernetes.io/projected/dd1fcc3e-9164-4f36-8082-8458a06f6ce9-kube-api-access-b9spb\") pod \"router-default-5444994796-46jp8\" (UID: \"dd1fcc3e-9164-4f36-8082-8458a06f6ce9\") " pod="openshift-ingress/router-default-5444994796-46jp8" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.468676 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a7c2774d-2d73-439d-94d7-6c184b05bf91-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-5p9tt\" (UID: \"a7c2774d-2d73-439d-94d7-6c184b05bf91\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-5p9tt" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.468697 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: 
\"kubernetes.io/projected/3bf6ab88-b524-4a66-9fff-f873646d11d9-bound-sa-token\") pod \"ingress-operator-5b745b69d9-bbq9v\" (UID: \"3bf6ab88-b524-4a66-9fff-f873646d11d9\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-bbq9v" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.468721 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vn22z\" (UniqueName: \"kubernetes.io/projected/f279daa8-7a4e-405d-b7af-499a2179001f-kube-api-access-vn22z\") pod \"openshift-config-operator-7777fb866f-4znl7\" (UID: \"f279daa8-7a4e-405d-b7af-499a2179001f\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-4znl7" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.468746 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-slf47\" (UniqueName: \"kubernetes.io/projected/876d6207-8976-4d02-887b-b431a4821eab-kube-api-access-slf47\") pod \"control-plane-machine-set-operator-78cbb6b69f-fcq84\" (UID: \"876d6207-8976-4d02-887b-b431a4821eab\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-fcq84" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.468780 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/3bf6ab88-b524-4a66-9fff-f873646d11d9-trusted-ca\") pod \"ingress-operator-5b745b69d9-bbq9v\" (UID: \"3bf6ab88-b524-4a66-9fff-f873646d11d9\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-bbq9v" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.468804 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/fdb216bf-a9cf-4f81-95ee-0424fc4bdc89-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-xv8s4\" (UID: \"fdb216bf-a9cf-4f81-95ee-0424fc4bdc89\") " pod="openshift-authentication/oauth-openshift-558db77b4-xv8s4" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.468825 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gpnnd\" (UniqueName: \"kubernetes.io/projected/fdb216bf-a9cf-4f81-95ee-0424fc4bdc89-kube-api-access-gpnnd\") pod \"oauth-openshift-558db77b4-xv8s4\" (UID: \"fdb216bf-a9cf-4f81-95ee-0424fc4bdc89\") " pod="openshift-authentication/oauth-openshift-558db77b4-xv8s4" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.468859 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k865v\" (UniqueName: \"kubernetes.io/projected/b546d058-2a13-4ed4-ad3b-061d9870f9ec-kube-api-access-k865v\") pod \"service-ca-operator-777779d784-c7d78\" (UID: \"b546d058-2a13-4ed4-ad3b-061d9870f9ec\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-c7d78" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.468891 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-64t7v\" (UniqueName: \"kubernetes.io/projected/9844df32-3946-46fe-90cb-0cc7fedd4af5-kube-api-access-64t7v\") pod \"service-ca-9c57cc56f-6lkw8\" (UID: \"9844df32-3946-46fe-90cb-0cc7fedd4af5\") " pod="openshift-service-ca/service-ca-9c57cc56f-6lkw8" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.468910 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: 
\"kubernetes.io/secret/e98a1486-4c73-4895-923a-dc3b4dcead56-srv-cert\") pod \"catalog-operator-68c6474976-xt2zh\" (UID: \"e98a1486-4c73-4895-923a-dc3b4dcead56\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-xt2zh" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.468930 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xm2l4\" (UniqueName: \"kubernetes.io/projected/c4da994b-a677-44ec-a87a-5ae4fdd5e943-kube-api-access-xm2l4\") pod \"olm-operator-6b444d44fb-rjkcl\" (UID: \"c4da994b-a677-44ec-a87a-5ae4fdd5e943\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rjkcl" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.468953 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hjn98\" (UniqueName: \"kubernetes.io/projected/2ddecabf-87aa-4eda-93a5-eee5c61d3b91-kube-api-access-hjn98\") pod \"image-registry-697d97f7c8-t6vlm\" (UID: \"2ddecabf-87aa-4eda-93a5-eee5c61d3b91\") " pod="openshift-image-registry/image-registry-697d97f7c8-t6vlm" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.468975 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s9vds\" (UniqueName: \"kubernetes.io/projected/5f40fe6a-d0b3-41ee-8193-6ad449e3f8df-kube-api-access-s9vds\") pod \"machine-config-server-q7c5v\" (UID: \"5f40fe6a-d0b3-41ee-8193-6ad449e3f8df\") " pod="openshift-machine-config-operator/machine-config-server-q7c5v" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.469008 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/0118936f-47b4-4d58-956c-572d58803b3f-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-4ggnn\" (UID: \"0118936f-47b4-4d58-956c-572d58803b3f\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-4ggnn" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.469060 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n8cpv\" (UniqueName: \"kubernetes.io/projected/a6a84618-0674-4410-ab88-6896066b6077-kube-api-access-n8cpv\") pod \"controller-manager-879f6c89f-654v6\" (UID: \"a6a84618-0674-4410-ab88-6896066b6077\") " pod="openshift-controller-manager/controller-manager-879f6c89f-654v6" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.469126 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b546d058-2a13-4ed4-ad3b-061d9870f9ec-config\") pod \"service-ca-operator-777779d784-c7d78\" (UID: \"b546d058-2a13-4ed4-ad3b-061d9870f9ec\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-c7d78" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.469166 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/5e6df402-f20e-4982-810e-5664640bb0ea-tmpfs\") pod \"packageserver-d55dfcdfc-nzdn9\" (UID: \"5e6df402-f20e-4982-810e-5664640bb0ea\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-nzdn9" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.469188 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: 
\"kubernetes.io/projected/39d336a2-fdb0-4500-a751-b771672ae2bd-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-f9q2n\" (UID: \"39d336a2-fdb0-4500-a751-b771672ae2bd\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-f9q2n" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.469210 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/35c87388-97eb-44ff-91d6-6e9b9cfaa6a1-service-ca\") pod \"console-f9d7485db-v6tz9\" (UID: \"35c87388-97eb-44ff-91d6-6e9b9cfaa6a1\") " pod="openshift-console/console-f9d7485db-v6tz9" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.469270 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/802caeaa-43a4-4cca-b946-5b561df185a8-serving-cert\") pod \"apiserver-76f77b778f-rkmmm\" (UID: \"802caeaa-43a4-4cca-b946-5b561df185a8\") " pod="openshift-apiserver/apiserver-76f77b778f-rkmmm" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.469686 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/35c87388-97eb-44ff-91d6-6e9b9cfaa6a1-console-config\") pod \"console-f9d7485db-v6tz9\" (UID: \"35c87388-97eb-44ff-91d6-6e9b9cfaa6a1\") " pod="openshift-console/console-f9d7485db-v6tz9" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.469753 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/b430fcf9-3c59-4aa8-b554-9e85cf2fcb40-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-s84lp\" (UID: \"b430fcf9-3c59-4aa8-b554-9e85cf2fcb40\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-s84lp" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.469812 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vgl9f\" (UniqueName: \"kubernetes.io/projected/eb006550-0648-416e-8bb0-48a5baa22a11-kube-api-access-vgl9f\") pod \"etcd-operator-b45778765-zpskq\" (UID: \"eb006550-0648-416e-8bb0-48a5baa22a11\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zpskq" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.469892 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/88db1fac-4c48-400a-9eee-f5c11d8dd12e-mountpoint-dir\") pod \"csi-hostpathplugin-kgpdn\" (UID: \"88db1fac-4c48-400a-9eee-f5c11d8dd12e\") " pod="hostpath-provisioner/csi-hostpathplugin-kgpdn" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.469917 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/c4da994b-a677-44ec-a87a-5ae4fdd5e943-srv-cert\") pod \"olm-operator-6b444d44fb-rjkcl\" (UID: \"c4da994b-a677-44ec-a87a-5ae4fdd5e943\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rjkcl" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.469942 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/fdb216bf-a9cf-4f81-95ee-0424fc4bdc89-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-xv8s4\" (UID: \"fdb216bf-a9cf-4f81-95ee-0424fc4bdc89\") " 
pod="openshift-authentication/oauth-openshift-558db77b4-xv8s4" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.469964 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/35c87388-97eb-44ff-91d6-6e9b9cfaa6a1-oauth-serving-cert\") pod \"console-f9d7485db-v6tz9\" (UID: \"35c87388-97eb-44ff-91d6-6e9b9cfaa6a1\") " pod="openshift-console/console-f9d7485db-v6tz9" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.469987 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/eb006550-0648-416e-8bb0-48a5baa22a11-etcd-client\") pod \"etcd-operator-b45778765-zpskq\" (UID: \"eb006550-0648-416e-8bb0-48a5baa22a11\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zpskq" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.470005 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dmx9k\" (UniqueName: \"kubernetes.io/projected/88db1fac-4c48-400a-9eee-f5c11d8dd12e-kube-api-access-dmx9k\") pod \"csi-hostpathplugin-kgpdn\" (UID: \"88db1fac-4c48-400a-9eee-f5c11d8dd12e\") " pod="hostpath-provisioner/csi-hostpathplugin-kgpdn" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.470029 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/3bf6ab88-b524-4a66-9fff-f873646d11d9-metrics-tls\") pod \"ingress-operator-5b745b69d9-bbq9v\" (UID: \"3bf6ab88-b524-4a66-9fff-f873646d11d9\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-bbq9v" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.470053 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/a190f2de-91d0-40a2-b1cc-f9c191d56ce5-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-77qvr\" (UID: \"a190f2de-91d0-40a2-b1cc-f9c191d56ce5\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-77qvr" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.470079 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/a190f2de-91d0-40a2-b1cc-f9c191d56ce5-service-ca\") pod \"cluster-version-operator-5c965bbfc6-77qvr\" (UID: \"a190f2de-91d0-40a2-b1cc-f9c191d56ce5\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-77qvr" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.470105 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/6eb664e0-cd97-4267-a6bc-523316711be5-proxy-tls\") pod \"machine-config-controller-84d6567774-jkg6x\" (UID: \"6eb664e0-cd97-4267-a6bc-523316711be5\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-jkg6x" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.470366 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/fdb216bf-a9cf-4f81-95ee-0424fc4bdc89-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-xv8s4\" (UID: \"fdb216bf-a9cf-4f81-95ee-0424fc4bdc89\") " pod="openshift-authentication/oauth-openshift-558db77b4-xv8s4" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.470433 4861 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a6a84618-0674-4410-ab88-6896066b6077-serving-cert\") pod \"controller-manager-879f6c89f-654v6\" (UID: \"a6a84618-0674-4410-ab88-6896066b6077\") " pod="openshift-controller-manager/controller-manager-879f6c89f-654v6" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.470457 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/a190f2de-91d0-40a2-b1cc-f9c191d56ce5-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-77qvr\" (UID: \"a190f2de-91d0-40a2-b1cc-f9c191d56ce5\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-77qvr" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.470481 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/fdb216bf-a9cf-4f81-95ee-0424fc4bdc89-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-xv8s4\" (UID: \"fdb216bf-a9cf-4f81-95ee-0424fc4bdc89\") " pod="openshift-authentication/oauth-openshift-558db77b4-xv8s4" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.470506 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zpgc8\" (UniqueName: \"kubernetes.io/projected/6eb664e0-cd97-4267-a6bc-523316711be5-kube-api-access-zpgc8\") pod \"machine-config-controller-84d6567774-jkg6x\" (UID: \"6eb664e0-cd97-4267-a6bc-523316711be5\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-jkg6x" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.470539 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/802caeaa-43a4-4cca-b946-5b561df185a8-encryption-config\") pod \"apiserver-76f77b778f-rkmmm\" (UID: \"802caeaa-43a4-4cca-b946-5b561df185a8\") " pod="openshift-apiserver/apiserver-76f77b778f-rkmmm" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.470558 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/fdb216bf-a9cf-4f81-95ee-0424fc4bdc89-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-xv8s4\" (UID: \"fdb216bf-a9cf-4f81-95ee-0424fc4bdc89\") " pod="openshift-authentication/oauth-openshift-558db77b4-xv8s4" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.470580 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/5b53b21d-af9a-4c3a-9b32-30cc1de6cea1-auth-proxy-config\") pod \"machine-config-operator-74547568cd-k46fg\" (UID: \"5b53b21d-af9a-4c3a-9b32-30cc1de6cea1\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-k46fg" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.470602 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m8qdq\" (UniqueName: \"kubernetes.io/projected/b06fff6c-bfa9-4b23-81d4-7cfc8ec52173-kube-api-access-m8qdq\") pod \"openshift-controller-manager-operator-756b6f6bc6-r4wck\" (UID: \"b06fff6c-bfa9-4b23-81d4-7cfc8ec52173\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-r4wck" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.470626 4861 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mkvqc\" (UniqueName: \"kubernetes.io/projected/e98a1486-4c73-4895-923a-dc3b4dcead56-kube-api-access-mkvqc\") pod \"catalog-operator-68c6474976-xt2zh\" (UID: \"e98a1486-4c73-4895-923a-dc3b4dcead56\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-xt2zh" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.470649 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5525z\" (UniqueName: \"kubernetes.io/projected/802caeaa-43a4-4cca-b946-5b561df185a8-kube-api-access-5525z\") pod \"apiserver-76f77b778f-rkmmm\" (UID: \"802caeaa-43a4-4cca-b946-5b561df185a8\") " pod="openshift-apiserver/apiserver-76f77b778f-rkmmm" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.470671 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bfdmx\" (UniqueName: \"kubernetes.io/projected/b430fcf9-3c59-4aa8-b554-9e85cf2fcb40-kube-api-access-bfdmx\") pod \"multus-admission-controller-857f4d67dd-s84lp\" (UID: \"b430fcf9-3c59-4aa8-b554-9e85cf2fcb40\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-s84lp" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.470694 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/e98a1486-4c73-4895-923a-dc3b4dcead56-profile-collector-cert\") pod \"catalog-operator-68c6474976-xt2zh\" (UID: \"e98a1486-4c73-4895-923a-dc3b4dcead56\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-xt2zh" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.470718 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c805746f-1e3e-488f-abda-4584a3028187-config\") pod \"authentication-operator-69f744f599-fv562\" (UID: \"c805746f-1e3e-488f-abda-4584a3028187\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-fv562" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.470739 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/16af6fa6-f789-48cd-8ec1-b84f169fe3a9-metrics-tls\") pod \"dns-default-6gqgm\" (UID: \"16af6fa6-f789-48cd-8ec1-b84f169fe3a9\") " pod="openshift-dns/dns-default-6gqgm" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.470762 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/35c87388-97eb-44ff-91d6-6e9b9cfaa6a1-trusted-ca-bundle\") pod \"console-f9d7485db-v6tz9\" (UID: \"35c87388-97eb-44ff-91d6-6e9b9cfaa6a1\") " pod="openshift-console/console-f9d7485db-v6tz9" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.470788 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e0993744-e39f-4dbf-a726-7580528ca825-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-584sd\" (UID: \"e0993744-e39f-4dbf-a726-7580528ca825\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-584sd" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.470818 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/39d336a2-fdb0-4500-a751-b771672ae2bd-config\") pod \"kube-apiserver-operator-766d6c64bb-f9q2n\" (UID: \"39d336a2-fdb0-4500-a751-b771672ae2bd\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-f9q2n" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.470856 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vhpkc\" (UniqueName: \"kubernetes.io/projected/5b53b21d-af9a-4c3a-9b32-30cc1de6cea1-kube-api-access-vhpkc\") pod \"machine-config-operator-74547568cd-k46fg\" (UID: \"5b53b21d-af9a-4c3a-9b32-30cc1de6cea1\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-k46fg" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.470878 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2ddecabf-87aa-4eda-93a5-eee5c61d3b91-trusted-ca\") pod \"image-registry-697d97f7c8-t6vlm\" (UID: \"2ddecabf-87aa-4eda-93a5-eee5c61d3b91\") " pod="openshift-image-registry/image-registry-697d97f7c8-t6vlm" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.470901 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/41c5f641-6c5c-4b3b-89d7-2a265cab36ca-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-8kth8\" (UID: \"41c5f641-6c5c-4b3b-89d7-2a265cab36ca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8kth8" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.470979 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b546d058-2a13-4ed4-ad3b-061d9870f9ec-serving-cert\") pod \"service-ca-operator-777779d784-c7d78\" (UID: \"b546d058-2a13-4ed4-ad3b-061d9870f9ec\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-c7d78" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.471046 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/3d27eb32-1a56-4d8c-9504-4b7fa8261df6-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-8tkv7\" (UID: \"3d27eb32-1a56-4d8c-9504-4b7fa8261df6\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-8tkv7" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.471071 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b06fff6c-bfa9-4b23-81d4-7cfc8ec52173-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-r4wck\" (UID: \"b06fff6c-bfa9-4b23-81d4-7cfc8ec52173\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-r4wck" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.471285 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4z8ps\" (UniqueName: \"kubernetes.io/projected/0118936f-47b4-4d58-956c-572d58803b3f-kube-api-access-4z8ps\") pod \"cluster-image-registry-operator-dc59b4c8b-4ggnn\" (UID: \"0118936f-47b4-4d58-956c-572d58803b3f\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-4ggnn" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.471312 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x4fcl\" (UniqueName: 
\"kubernetes.io/projected/41c5f641-6c5c-4b3b-89d7-2a265cab36ca-kube-api-access-x4fcl\") pod \"apiserver-7bbb656c7d-8kth8\" (UID: \"41c5f641-6c5c-4b3b-89d7-2a265cab36ca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8kth8" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.471445 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/04154cd6-a67c-42d3-bbb0-951c4986390d-config\") pod \"machine-api-operator-5694c8668f-rg4gb\" (UID: \"04154cd6-a67c-42d3-bbb0-951c4986390d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-rg4gb" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.471469 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/2ddecabf-87aa-4eda-93a5-eee5c61d3b91-registry-tls\") pod \"image-registry-697d97f7c8-t6vlm\" (UID: \"2ddecabf-87aa-4eda-93a5-eee5c61d3b91\") " pod="openshift-image-registry/image-registry-697d97f7c8-t6vlm" Oct 03 13:32:57 crc kubenswrapper[4861]: E1003 13:32:57.471659 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 13:32:57.971536828 +0000 UTC m=+91.969521875 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.471681 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b06fff6c-bfa9-4b23-81d4-7cfc8ec52173-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-r4wck\" (UID: \"b06fff6c-bfa9-4b23-81d4-7cfc8ec52173\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-r4wck" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.471826 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/41c5f641-6c5c-4b3b-89d7-2a265cab36ca-serving-cert\") pod \"apiserver-7bbb656c7d-8kth8\" (UID: \"41c5f641-6c5c-4b3b-89d7-2a265cab36ca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8kth8" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.471854 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/39d336a2-fdb0-4500-a751-b771672ae2bd-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-f9q2n\" (UID: \"39d336a2-fdb0-4500-a751-b771672ae2bd\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-f9q2n" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.472008 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/0118936f-47b4-4d58-956c-572d58803b3f-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-4ggnn\" (UID: \"0118936f-47b4-4d58-956c-572d58803b3f\") " 
pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-4ggnn" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.472036 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h7wl5\" (UniqueName: \"kubernetes.io/projected/16af6fa6-f789-48cd-8ec1-b84f169fe3a9-kube-api-access-h7wl5\") pod \"dns-default-6gqgm\" (UID: \"16af6fa6-f789-48cd-8ec1-b84f169fe3a9\") " pod="openshift-dns/dns-default-6gqgm" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.472171 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/88db1fac-4c48-400a-9eee-f5c11d8dd12e-csi-data-dir\") pod \"csi-hostpathplugin-kgpdn\" (UID: \"88db1fac-4c48-400a-9eee-f5c11d8dd12e\") " pod="hostpath-provisioner/csi-hostpathplugin-kgpdn" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.472192 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/dd1fcc3e-9164-4f36-8082-8458a06f6ce9-service-ca-bundle\") pod \"router-default-5444994796-46jp8\" (UID: \"dd1fcc3e-9164-4f36-8082-8458a06f6ce9\") " pod="openshift-ingress/router-default-5444994796-46jp8" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.472206 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/35c87388-97eb-44ff-91d6-6e9b9cfaa6a1-oauth-serving-cert\") pod \"console-f9d7485db-v6tz9\" (UID: \"35c87388-97eb-44ff-91d6-6e9b9cfaa6a1\") " pod="openshift-console/console-f9d7485db-v6tz9" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.473657 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/a190f2de-91d0-40a2-b1cc-f9c191d56ce5-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-77qvr\" (UID: \"a190f2de-91d0-40a2-b1cc-f9c191d56ce5\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-77qvr" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.474220 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c805746f-1e3e-488f-abda-4584a3028187-config\") pod \"authentication-operator-69f744f599-fv562\" (UID: \"c805746f-1e3e-488f-abda-4584a3028187\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-fv562" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.474638 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/a190f2de-91d0-40a2-b1cc-f9c191d56ce5-service-ca\") pod \"cluster-version-operator-5c965bbfc6-77qvr\" (UID: \"a190f2de-91d0-40a2-b1cc-f9c191d56ce5\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-77qvr" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.474690 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c805746f-1e3e-488f-abda-4584a3028187-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-fv562\" (UID: \"c805746f-1e3e-488f-abda-4584a3028187\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-fv562" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.477520 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/35c87388-97eb-44ff-91d6-6e9b9cfaa6a1-trusted-ca-bundle\") pod \"console-f9d7485db-v6tz9\" (UID: \"35c87388-97eb-44ff-91d6-6e9b9cfaa6a1\") " pod="openshift-console/console-f9d7485db-v6tz9" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.477587 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/0118936f-47b4-4d58-956c-572d58803b3f-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-4ggnn\" (UID: \"0118936f-47b4-4d58-956c-572d58803b3f\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-4ggnn" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.477950 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/3bf6ab88-b524-4a66-9fff-f873646d11d9-metrics-tls\") pod \"ingress-operator-5b745b69d9-bbq9v\" (UID: \"3bf6ab88-b524-4a66-9fff-f873646d11d9\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-bbq9v" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.478018 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/fdb216bf-a9cf-4f81-95ee-0424fc4bdc89-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-xv8s4\" (UID: \"fdb216bf-a9cf-4f81-95ee-0424fc4bdc89\") " pod="openshift-authentication/oauth-openshift-558db77b4-xv8s4" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.478825 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b06fff6c-bfa9-4b23-81d4-7cfc8ec52173-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-r4wck\" (UID: \"b06fff6c-bfa9-4b23-81d4-7cfc8ec52173\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-r4wck" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.478891 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/41c5f641-6c5c-4b3b-89d7-2a265cab36ca-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-8kth8\" (UID: \"41c5f641-6c5c-4b3b-89d7-2a265cab36ca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8kth8" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.479723 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/35c87388-97eb-44ff-91d6-6e9b9cfaa6a1-service-ca\") pod \"console-f9d7485db-v6tz9\" (UID: \"35c87388-97eb-44ff-91d6-6e9b9cfaa6a1\") " pod="openshift-console/console-f9d7485db-v6tz9" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.480146 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a6a84618-0674-4410-ab88-6896066b6077-config\") pod \"controller-manager-879f6c89f-654v6\" (UID: \"a6a84618-0674-4410-ab88-6896066b6077\") " pod="openshift-controller-manager/controller-manager-879f6c89f-654v6" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.480937 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/fdb216bf-a9cf-4f81-95ee-0424fc4bdc89-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-xv8s4\" (UID: \"fdb216bf-a9cf-4f81-95ee-0424fc4bdc89\") " pod="openshift-authentication/oauth-openshift-558db77b4-xv8s4" Oct 03 13:32:57 
crc kubenswrapper[4861]: I1003 13:32:57.481318 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b06fff6c-bfa9-4b23-81d4-7cfc8ec52173-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-r4wck\" (UID: \"b06fff6c-bfa9-4b23-81d4-7cfc8ec52173\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-r4wck" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.481489 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/41c5f641-6c5c-4b3b-89d7-2a265cab36ca-serving-cert\") pod \"apiserver-7bbb656c7d-8kth8\" (UID: \"41c5f641-6c5c-4b3b-89d7-2a265cab36ca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8kth8" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.482224 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/fdb216bf-a9cf-4f81-95ee-0424fc4bdc89-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-xv8s4\" (UID: \"fdb216bf-a9cf-4f81-95ee-0424fc4bdc89\") " pod="openshift-authentication/oauth-openshift-558db77b4-xv8s4" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.482332 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/2ddecabf-87aa-4eda-93a5-eee5c61d3b91-installation-pull-secrets\") pod \"image-registry-697d97f7c8-t6vlm\" (UID: \"2ddecabf-87aa-4eda-93a5-eee5c61d3b91\") " pod="openshift-image-registry/image-registry-697d97f7c8-t6vlm" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.482364 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/dd1fcc3e-9164-4f36-8082-8458a06f6ce9-default-certificate\") pod \"router-default-5444994796-46jp8\" (UID: \"dd1fcc3e-9164-4f36-8082-8458a06f6ce9\") " pod="openshift-ingress/router-default-5444994796-46jp8" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.482391 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/04154cd6-a67c-42d3-bbb0-951c4986390d-config\") pod \"machine-api-operator-5694c8668f-rg4gb\" (UID: \"04154cd6-a67c-42d3-bbb0-951c4986390d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-rg4gb" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.482412 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/fdb216bf-a9cf-4f81-95ee-0424fc4bdc89-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-xv8s4\" (UID: \"fdb216bf-a9cf-4f81-95ee-0424fc4bdc89\") " pod="openshift-authentication/oauth-openshift-558db77b4-xv8s4" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.482441 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zjgz7\" (UniqueName: \"kubernetes.io/projected/f23bb6a0-acb7-4ada-96a2-73a978d75125-kube-api-access-zjgz7\") pod \"marketplace-operator-79b997595-bj6mh\" (UID: \"f23bb6a0-acb7-4ada-96a2-73a978d75125\") " pod="openshift-marketplace/marketplace-operator-79b997595-bj6mh" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.482468 4861 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/f279daa8-7a4e-405d-b7af-499a2179001f-available-featuregates\") pod \"openshift-config-operator-7777fb866f-4znl7\" (UID: \"f279daa8-7a4e-405d-b7af-499a2179001f\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-4znl7" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.482494 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5aad7266-a7c4-45fb-bbed-e1a1277681ef-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-xxf9s\" (UID: \"5aad7266-a7c4-45fb-bbed-e1a1277681ef\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-xxf9s" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.482573 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/eb006550-0648-416e-8bb0-48a5baa22a11-etcd-ca\") pod \"etcd-operator-b45778765-zpskq\" (UID: \"eb006550-0648-416e-8bb0-48a5baa22a11\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zpskq" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.482606 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/04154cd6-a67c-42d3-bbb0-951c4986390d-images\") pod \"machine-api-operator-5694c8668f-rg4gb\" (UID: \"04154cd6-a67c-42d3-bbb0-951c4986390d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-rg4gb" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.482631 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a190f2de-91d0-40a2-b1cc-f9c191d56ce5-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-77qvr\" (UID: \"a190f2de-91d0-40a2-b1cc-f9c191d56ce5\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-77qvr" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.482657 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/876d6207-8976-4d02-887b-b431a4821eab-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-fcq84\" (UID: \"876d6207-8976-4d02-887b-b431a4821eab\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-fcq84" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.482681 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/88db1fac-4c48-400a-9eee-f5c11d8dd12e-registration-dir\") pod \"csi-hostpathplugin-kgpdn\" (UID: \"88db1fac-4c48-400a-9eee-f5c11d8dd12e\") " pod="hostpath-provisioner/csi-hostpathplugin-kgpdn" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.482702 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/04154cd6-a67c-42d3-bbb0-951c4986390d-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-rg4gb\" (UID: \"04154cd6-a67c-42d3-bbb0-951c4986390d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-rg4gb" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.482721 4861 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/802caeaa-43a4-4cca-b946-5b561df185a8-trusted-ca-bundle\") pod \"apiserver-76f77b778f-rkmmm\" (UID: \"802caeaa-43a4-4cca-b946-5b561df185a8\") " pod="openshift-apiserver/apiserver-76f77b778f-rkmmm" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.482743 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4vdwf\" (UniqueName: \"kubernetes.io/projected/5e6df402-f20e-4982-810e-5664640bb0ea-kube-api-access-4vdwf\") pod \"packageserver-d55dfcdfc-nzdn9\" (UID: \"5e6df402-f20e-4982-810e-5664640bb0ea\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-nzdn9" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.482765 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/88db1fac-4c48-400a-9eee-f5c11d8dd12e-socket-dir\") pod \"csi-hostpathplugin-kgpdn\" (UID: \"88db1fac-4c48-400a-9eee-f5c11d8dd12e\") " pod="hostpath-provisioner/csi-hostpathplugin-kgpdn" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.483110 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2ddecabf-87aa-4eda-93a5-eee5c61d3b91-trusted-ca\") pod \"image-registry-697d97f7c8-t6vlm\" (UID: \"2ddecabf-87aa-4eda-93a5-eee5c61d3b91\") " pod="openshift-image-registry/image-registry-697d97f7c8-t6vlm" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.483415 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-htfhq"] Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.484099 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a7c2774d-2d73-439d-94d7-6c184b05bf91-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-5p9tt\" (UID: \"a7c2774d-2d73-439d-94d7-6c184b05bf91\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-5p9tt" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.484105 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/35c87388-97eb-44ff-91d6-6e9b9cfaa6a1-console-config\") pod \"console-f9d7485db-v6tz9\" (UID: \"35c87388-97eb-44ff-91d6-6e9b9cfaa6a1\") " pod="openshift-console/console-f9d7485db-v6tz9" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.485164 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/a190f2de-91d0-40a2-b1cc-f9c191d56ce5-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-77qvr\" (UID: \"a190f2de-91d0-40a2-b1cc-f9c191d56ce5\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-77qvr" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.485458 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/802caeaa-43a4-4cca-b946-5b561df185a8-serving-cert\") pod \"apiserver-76f77b778f-rkmmm\" (UID: \"802caeaa-43a4-4cca-b946-5b561df185a8\") " pod="openshift-apiserver/apiserver-76f77b778f-rkmmm" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.485542 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: 
\"kubernetes.io/projected/2ddecabf-87aa-4eda-93a5-eee5c61d3b91-registry-tls\") pod \"image-registry-697d97f7c8-t6vlm\" (UID: \"2ddecabf-87aa-4eda-93a5-eee5c61d3b91\") " pod="openshift-image-registry/image-registry-697d97f7c8-t6vlm" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.485955 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/04154cd6-a67c-42d3-bbb0-951c4986390d-images\") pod \"machine-api-operator-5694c8668f-rg4gb\" (UID: \"04154cd6-a67c-42d3-bbb0-951c4986390d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-rg4gb" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.486548 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a7c2774d-2d73-439d-94d7-6c184b05bf91-config\") pod \"openshift-apiserver-operator-796bbdcf4f-5p9tt\" (UID: \"a7c2774d-2d73-439d-94d7-6c184b05bf91\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-5p9tt" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.486602 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/2ddecabf-87aa-4eda-93a5-eee5c61d3b91-registry-certificates\") pod \"image-registry-697d97f7c8-t6vlm\" (UID: \"2ddecabf-87aa-4eda-93a5-eee5c61d3b91\") " pod="openshift-image-registry/image-registry-697d97f7c8-t6vlm" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.486624 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4vr65\" (UniqueName: \"kubernetes.io/projected/b35950ee-9000-4269-a58d-f1d2c4563f05-kube-api-access-4vr65\") pod \"downloads-7954f5f757-rc9jz\" (UID: \"b35950ee-9000-4269-a58d-f1d2c4563f05\") " pod="openshift-console/downloads-7954f5f757-rc9jz" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.486672 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dzhhp\" (UniqueName: \"kubernetes.io/projected/51bf4935-7bd2-4059-9869-60fc8db46d82-kube-api-access-dzhhp\") pod \"dns-operator-744455d44c-zjlzh\" (UID: \"51bf4935-7bd2-4059-9869-60fc8db46d82\") " pod="openshift-dns-operator/dns-operator-744455d44c-zjlzh" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.486712 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/fdb216bf-a9cf-4f81-95ee-0424fc4bdc89-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-xv8s4\" (UID: \"fdb216bf-a9cf-4f81-95ee-0424fc4bdc89\") " pod="openshift-authentication/oauth-openshift-558db77b4-xv8s4" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.486717 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/f279daa8-7a4e-405d-b7af-499a2179001f-available-featuregates\") pod \"openshift-config-operator-7777fb866f-4znl7\" (UID: \"f279daa8-7a4e-405d-b7af-499a2179001f\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-4znl7" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.486736 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/5aaec6a4-8da9-4ac8-a792-131254ad8e23-package-server-manager-serving-cert\") pod 
\"package-server-manager-789f6589d5-lzfdk\" (UID: \"5aaec6a4-8da9-4ac8-a792-131254ad8e23\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-lzfdk" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.486761 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c805746f-1e3e-488f-abda-4584a3028187-serving-cert\") pod \"authentication-operator-69f744f599-fv562\" (UID: \"c805746f-1e3e-488f-abda-4584a3028187\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-fv562" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.486780 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/802caeaa-43a4-4cca-b946-5b561df185a8-audit\") pod \"apiserver-76f77b778f-rkmmm\" (UID: \"802caeaa-43a4-4cca-b946-5b561df185a8\") " pod="openshift-apiserver/apiserver-76f77b778f-rkmmm" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.486800 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2j4cz\" (UniqueName: \"kubernetes.io/projected/217fa116-2a2c-4c70-a13c-370fd7c2ffd7-kube-api-access-2j4cz\") pod \"migrator-59844c95c7-t47dt\" (UID: \"217fa116-2a2c-4c70-a13c-370fd7c2ffd7\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-t47dt" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.486854 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/5e6df402-f20e-4982-810e-5664640bb0ea-webhook-cert\") pod \"packageserver-d55dfcdfc-nzdn9\" (UID: \"5e6df402-f20e-4982-810e-5664640bb0ea\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-nzdn9" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.486889 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lhtgw\" (UniqueName: \"kubernetes.io/projected/3bf6ab88-b524-4a66-9fff-f873646d11d9-kube-api-access-lhtgw\") pod \"ingress-operator-5b745b69d9-bbq9v\" (UID: \"3bf6ab88-b524-4a66-9fff-f873646d11d9\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-bbq9v" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.486906 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f23bb6a0-acb7-4ada-96a2-73a978d75125-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-bj6mh\" (UID: \"f23bb6a0-acb7-4ada-96a2-73a978d75125\") " pod="openshift-marketplace/marketplace-operator-79b997595-bj6mh" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.486952 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/fdb216bf-a9cf-4f81-95ee-0424fc4bdc89-audit-policies\") pod \"oauth-openshift-558db77b4-xv8s4\" (UID: \"fdb216bf-a9cf-4f81-95ee-0424fc4bdc89\") " pod="openshift-authentication/oauth-openshift-558db77b4-xv8s4" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.486955 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/802caeaa-43a4-4cca-b946-5b561df185a8-encryption-config\") pod \"apiserver-76f77b778f-rkmmm\" (UID: \"802caeaa-43a4-4cca-b946-5b561df185a8\") " pod="openshift-apiserver/apiserver-76f77b778f-rkmmm" Oct 03 
13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.486970 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/a6a84618-0674-4410-ab88-6896066b6077-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-654v6\" (UID: \"a6a84618-0674-4410-ab88-6896066b6077\") " pod="openshift-controller-manager/controller-manager-879f6c89f-654v6" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.486988 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/fdb216bf-a9cf-4f81-95ee-0424fc4bdc89-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-xv8s4\" (UID: \"fdb216bf-a9cf-4f81-95ee-0424fc4bdc89\") " pod="openshift-authentication/oauth-openshift-558db77b4-xv8s4" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.487046 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/41c5f641-6c5c-4b3b-89d7-2a265cab36ca-audit-policies\") pod \"apiserver-7bbb656c7d-8kth8\" (UID: \"41c5f641-6c5c-4b3b-89d7-2a265cab36ca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8kth8" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.487066 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/802caeaa-43a4-4cca-b946-5b561df185a8-config\") pod \"apiserver-76f77b778f-rkmmm\" (UID: \"802caeaa-43a4-4cca-b946-5b561df185a8\") " pod="openshift-apiserver/apiserver-76f77b778f-rkmmm" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.487083 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eb006550-0648-416e-8bb0-48a5baa22a11-config\") pod \"etcd-operator-b45778765-zpskq\" (UID: \"eb006550-0648-416e-8bb0-48a5baa22a11\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zpskq" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.487102 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/802caeaa-43a4-4cca-b946-5b561df185a8-etcd-serving-ca\") pod \"apiserver-76f77b778f-rkmmm\" (UID: \"802caeaa-43a4-4cca-b946-5b561df185a8\") " pod="openshift-apiserver/apiserver-76f77b778f-rkmmm" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.487121 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tc2sr\" (UniqueName: \"kubernetes.io/projected/0c3c7089-d98e-4504-91c2-27851ed21d16-kube-api-access-tc2sr\") pod \"collect-profiles-29324970-q4nf6\" (UID: \"0c3c7089-d98e-4504-91c2-27851ed21d16\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324970-q4nf6" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.487140 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a190f2de-91d0-40a2-b1cc-f9c191d56ce5-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-77qvr\" (UID: \"a190f2de-91d0-40a2-b1cc-f9c191d56ce5\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-77qvr" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.487157 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: 
\"kubernetes.io/configmap/16af6fa6-f789-48cd-8ec1-b84f169fe3a9-config-volume\") pod \"dns-default-6gqgm\" (UID: \"16af6fa6-f789-48cd-8ec1-b84f169fe3a9\") " pod="openshift-dns/dns-default-6gqgm" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.487173 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/dd1fcc3e-9164-4f36-8082-8458a06f6ce9-stats-auth\") pod \"router-default-5444994796-46jp8\" (UID: \"dd1fcc3e-9164-4f36-8082-8458a06f6ce9\") " pod="openshift-ingress/router-default-5444994796-46jp8" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.487206 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/fdb216bf-a9cf-4f81-95ee-0424fc4bdc89-audit-dir\") pod \"oauth-openshift-558db77b4-xv8s4\" (UID: \"fdb216bf-a9cf-4f81-95ee-0424fc4bdc89\") " pod="openshift-authentication/oauth-openshift-558db77b4-xv8s4" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.487241 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/dd1fcc3e-9164-4f36-8082-8458a06f6ce9-metrics-certs\") pod \"router-default-5444994796-46jp8\" (UID: \"dd1fcc3e-9164-4f36-8082-8458a06f6ce9\") " pod="openshift-ingress/router-default-5444994796-46jp8" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.487261 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/5b53b21d-af9a-4c3a-9b32-30cc1de6cea1-proxy-tls\") pod \"machine-config-operator-74547568cd-k46fg\" (UID: \"5b53b21d-af9a-4c3a-9b32-30cc1de6cea1\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-k46fg" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.487331 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d7jgz\" (UniqueName: \"kubernetes.io/projected/e0993744-e39f-4dbf-a726-7580528ca825-kube-api-access-d7jgz\") pod \"kube-storage-version-migrator-operator-b67b599dd-584sd\" (UID: \"e0993744-e39f-4dbf-a726-7580528ca825\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-584sd" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.487363 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/51bf4935-7bd2-4059-9869-60fc8db46d82-metrics-tls\") pod \"dns-operator-744455d44c-zjlzh\" (UID: \"51bf4935-7bd2-4059-9869-60fc8db46d82\") " pod="openshift-dns-operator/dns-operator-744455d44c-zjlzh" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.487381 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/802caeaa-43a4-4cca-b946-5b561df185a8-image-import-ca\") pod \"apiserver-76f77b778f-rkmmm\" (UID: \"802caeaa-43a4-4cca-b946-5b561df185a8\") " pod="openshift-apiserver/apiserver-76f77b778f-rkmmm" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.487399 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/5e6df402-f20e-4982-810e-5664640bb0ea-apiservice-cert\") pod \"packageserver-d55dfcdfc-nzdn9\" (UID: \"5e6df402-f20e-4982-810e-5664640bb0ea\") " 
pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-nzdn9" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.487420 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/fdb216bf-a9cf-4f81-95ee-0424fc4bdc89-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-xv8s4\" (UID: \"fdb216bf-a9cf-4f81-95ee-0424fc4bdc89\") " pod="openshift-authentication/oauth-openshift-558db77b4-xv8s4" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.487452 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/9844df32-3946-46fe-90cb-0cc7fedd4af5-signing-cabundle\") pod \"service-ca-9c57cc56f-6lkw8\" (UID: \"9844df32-3946-46fe-90cb-0cc7fedd4af5\") " pod="openshift-service-ca/service-ca-9c57cc56f-6lkw8" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.487471 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/802caeaa-43a4-4cca-b946-5b561df185a8-node-pullsecrets\") pod \"apiserver-76f77b778f-rkmmm\" (UID: \"802caeaa-43a4-4cca-b946-5b561df185a8\") " pod="openshift-apiserver/apiserver-76f77b778f-rkmmm" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.487489 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f279daa8-7a4e-405d-b7af-499a2179001f-serving-cert\") pod \"openshift-config-operator-7777fb866f-4znl7\" (UID: \"f279daa8-7a4e-405d-b7af-499a2179001f\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-4znl7" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.487507 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/c4da994b-a677-44ec-a87a-5ae4fdd5e943-profile-collector-cert\") pod \"olm-operator-6b444d44fb-rjkcl\" (UID: \"c4da994b-a677-44ec-a87a-5ae4fdd5e943\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rjkcl" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.487525 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/35c87388-97eb-44ff-91d6-6e9b9cfaa6a1-console-oauth-config\") pod \"console-f9d7485db-v6tz9\" (UID: \"35c87388-97eb-44ff-91d6-6e9b9cfaa6a1\") " pod="openshift-console/console-f9d7485db-v6tz9" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.487542 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/41c5f641-6c5c-4b3b-89d7-2a265cab36ca-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-8kth8\" (UID: \"41c5f641-6c5c-4b3b-89d7-2a265cab36ca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8kth8" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.487560 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/35c87388-97eb-44ff-91d6-6e9b9cfaa6a1-console-serving-cert\") pod \"console-f9d7485db-v6tz9\" (UID: \"35c87388-97eb-44ff-91d6-6e9b9cfaa6a1\") " pod="openshift-console/console-f9d7485db-v6tz9" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.487578 4861 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/802caeaa-43a4-4cca-b946-5b561df185a8-audit-dir\") pod \"apiserver-76f77b778f-rkmmm\" (UID: \"802caeaa-43a4-4cca-b946-5b561df185a8\") " pod="openshift-apiserver/apiserver-76f77b778f-rkmmm" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.487695 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a6a84618-0674-4410-ab88-6896066b6077-serving-cert\") pod \"controller-manager-879f6c89f-654v6\" (UID: \"a6a84618-0674-4410-ab88-6896066b6077\") " pod="openshift-controller-manager/controller-manager-879f6c89f-654v6" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.487734 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a6a84618-0674-4410-ab88-6896066b6077-client-ca\") pod \"controller-manager-879f6c89f-654v6\" (UID: \"a6a84618-0674-4410-ab88-6896066b6077\") " pod="openshift-controller-manager/controller-manager-879f6c89f-654v6" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.487753 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/5b53b21d-af9a-4c3a-9b32-30cc1de6cea1-images\") pod \"machine-config-operator-74547568cd-k46fg\" (UID: \"5b53b21d-af9a-4c3a-9b32-30cc1de6cea1\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-k46fg" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.487782 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t7hbj\" (UniqueName: \"kubernetes.io/projected/04154cd6-a67c-42d3-bbb0-951c4986390d-kube-api-access-t7hbj\") pod \"machine-api-operator-5694c8668f-rg4gb\" (UID: \"04154cd6-a67c-42d3-bbb0-951c4986390d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-rg4gb" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.487782 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/802caeaa-43a4-4cca-b946-5b561df185a8-audit-dir\") pod \"apiserver-76f77b778f-rkmmm\" (UID: \"802caeaa-43a4-4cca-b946-5b561df185a8\") " pod="openshift-apiserver/apiserver-76f77b778f-rkmmm" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.487789 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/3bf6ab88-b524-4a66-9fff-f873646d11d9-trusted-ca\") pod \"ingress-operator-5b745b69d9-bbq9v\" (UID: \"3bf6ab88-b524-4a66-9fff-f873646d11d9\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-bbq9v" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.487801 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/88db1fac-4c48-400a-9eee-f5c11d8dd12e-plugins-dir\") pod \"csi-hostpathplugin-kgpdn\" (UID: \"88db1fac-4c48-400a-9eee-f5c11d8dd12e\") " pod="hostpath-provisioner/csi-hostpathplugin-kgpdn" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.487817 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e0993744-e39f-4dbf-a726-7580528ca825-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-584sd\" (UID: \"e0993744-e39f-4dbf-a726-7580528ca825\") " 
pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-584sd" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.487835 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/0118936f-47b4-4d58-956c-572d58803b3f-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-4ggnn\" (UID: \"0118936f-47b4-4d58-956c-572d58803b3f\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-4ggnn" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.487851 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/41c5f641-6c5c-4b3b-89d7-2a265cab36ca-etcd-client\") pod \"apiserver-7bbb656c7d-8kth8\" (UID: \"41c5f641-6c5c-4b3b-89d7-2a265cab36ca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8kth8" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.487867 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/fdb216bf-a9cf-4f81-95ee-0424fc4bdc89-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-xv8s4\" (UID: \"fdb216bf-a9cf-4f81-95ee-0424fc4bdc89\") " pod="openshift-authentication/oauth-openshift-558db77b4-xv8s4" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.487904 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5aad7266-a7c4-45fb-bbed-e1a1277681ef-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-xxf9s\" (UID: \"5aad7266-a7c4-45fb-bbed-e1a1277681ef\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-xxf9s" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.487921 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/5b4e4f2b-6854-4b02-9e1a-79aeff76d109-cert\") pod \"ingress-canary-5nfml\" (UID: \"5b4e4f2b-6854-4b02-9e1a-79aeff76d109\") " pod="openshift-ingress-canary/ingress-canary-5nfml" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.487938 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/6eb664e0-cd97-4267-a6bc-523316711be5-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-jkg6x\" (UID: \"6eb664e0-cd97-4267-a6bc-523316711be5\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-jkg6x" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.487956 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c805746f-1e3e-488f-abda-4584a3028187-service-ca-bundle\") pod \"authentication-operator-69f744f599-fv562\" (UID: \"c805746f-1e3e-488f-abda-4584a3028187\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-fv562" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.487972 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/9844df32-3946-46fe-90cb-0cc7fedd4af5-signing-key\") pod \"service-ca-9c57cc56f-6lkw8\" (UID: \"9844df32-3946-46fe-90cb-0cc7fedd4af5\") " 
pod="openshift-service-ca/service-ca-9c57cc56f-6lkw8" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.487987 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5f40fe6a-d0b3-41ee-8193-6ad449e3f8df-certs\") pod \"machine-config-server-q7c5v\" (UID: \"5f40fe6a-d0b3-41ee-8193-6ad449e3f8df\") " pod="openshift-machine-config-operator/machine-config-server-q7c5v" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.488002 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/f23bb6a0-acb7-4ada-96a2-73a978d75125-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-bj6mh\" (UID: \"f23bb6a0-acb7-4ada-96a2-73a978d75125\") " pod="openshift-marketplace/marketplace-operator-79b997595-bj6mh" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.488033 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/41c5f641-6c5c-4b3b-89d7-2a265cab36ca-encryption-config\") pod \"apiserver-7bbb656c7d-8kth8\" (UID: \"41c5f641-6c5c-4b3b-89d7-2a265cab36ca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8kth8" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.488049 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/fdb216bf-a9cf-4f81-95ee-0424fc4bdc89-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-xv8s4\" (UID: \"fdb216bf-a9cf-4f81-95ee-0424fc4bdc89\") " pod="openshift-authentication/oauth-openshift-558db77b4-xv8s4" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.488067 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/eb006550-0648-416e-8bb0-48a5baa22a11-serving-cert\") pod \"etcd-operator-b45778765-zpskq\" (UID: \"eb006550-0648-416e-8bb0-48a5baa22a11\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zpskq" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.488085 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/2ddecabf-87aa-4eda-93a5-eee5c61d3b91-ca-trust-extracted\") pod \"image-registry-697d97f7c8-t6vlm\" (UID: \"2ddecabf-87aa-4eda-93a5-eee5c61d3b91\") " pod="openshift-image-registry/image-registry-697d97f7c8-t6vlm" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.488102 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-49pjb\" (UniqueName: \"kubernetes.io/projected/35c87388-97eb-44ff-91d6-6e9b9cfaa6a1-kube-api-access-49pjb\") pod \"console-f9d7485db-v6tz9\" (UID: \"35c87388-97eb-44ff-91d6-6e9b9cfaa6a1\") " pod="openshift-console/console-f9d7485db-v6tz9" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.488120 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/41c5f641-6c5c-4b3b-89d7-2a265cab36ca-audit-dir\") pod \"apiserver-7bbb656c7d-8kth8\" (UID: \"41c5f641-6c5c-4b3b-89d7-2a265cab36ca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8kth8" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.488137 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-pdfw2\" (UniqueName: \"kubernetes.io/projected/3d27eb32-1a56-4d8c-9504-4b7fa8261df6-kube-api-access-pdfw2\") pod \"cluster-samples-operator-665b6dd947-8tkv7\" (UID: \"3d27eb32-1a56-4d8c-9504-4b7fa8261df6\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-8tkv7" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.488154 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/0c3c7089-d98e-4504-91c2-27851ed21d16-secret-volume\") pod \"collect-profiles-29324970-q4nf6\" (UID: \"0c3c7089-d98e-4504-91c2-27851ed21d16\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324970-q4nf6" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.488171 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5f40fe6a-d0b3-41ee-8193-6ad449e3f8df-node-bootstrap-token\") pod \"machine-config-server-q7c5v\" (UID: \"5f40fe6a-d0b3-41ee-8193-6ad449e3f8df\") " pod="openshift-machine-config-operator/machine-config-server-q7c5v" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.488187 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/802caeaa-43a4-4cca-b946-5b561df185a8-etcd-client\") pod \"apiserver-76f77b778f-rkmmm\" (UID: \"802caeaa-43a4-4cca-b946-5b561df185a8\") " pod="openshift-apiserver/apiserver-76f77b778f-rkmmm" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.488442 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/802caeaa-43a4-4cca-b946-5b561df185a8-config\") pod \"apiserver-76f77b778f-rkmmm\" (UID: \"802caeaa-43a4-4cca-b946-5b561df185a8\") " pod="openshift-apiserver/apiserver-76f77b778f-rkmmm" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.487099 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/802caeaa-43a4-4cca-b946-5b561df185a8-trusted-ca-bundle\") pod \"apiserver-76f77b778f-rkmmm\" (UID: \"802caeaa-43a4-4cca-b946-5b561df185a8\") " pod="openshift-apiserver/apiserver-76f77b778f-rkmmm" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.488569 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5aad7266-a7c4-45fb-bbed-e1a1277681ef-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-xxf9s\" (UID: \"5aad7266-a7c4-45fb-bbed-e1a1277681ef\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-xxf9s" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.488604 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/eb006550-0648-416e-8bb0-48a5baa22a11-etcd-service-ca\") pod \"etcd-operator-b45778765-zpskq\" (UID: \"eb006550-0648-416e-8bb0-48a5baa22a11\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zpskq" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.488661 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/2ddecabf-87aa-4eda-93a5-eee5c61d3b91-bound-sa-token\") pod \"image-registry-697d97f7c8-t6vlm\" (UID: 
\"2ddecabf-87aa-4eda-93a5-eee5c61d3b91\") " pod="openshift-image-registry/image-registry-697d97f7c8-t6vlm" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.488690 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jskrd\" (UniqueName: \"kubernetes.io/projected/c805746f-1e3e-488f-abda-4584a3028187-kube-api-access-jskrd\") pod \"authentication-operator-69f744f599-fv562\" (UID: \"c805746f-1e3e-488f-abda-4584a3028187\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-fv562" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.488716 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cszvg\" (UniqueName: \"kubernetes.io/projected/a7c2774d-2d73-439d-94d7-6c184b05bf91-kube-api-access-cszvg\") pod \"openshift-apiserver-operator-796bbdcf4f-5p9tt\" (UID: \"a7c2774d-2d73-439d-94d7-6c184b05bf91\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-5p9tt" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.488741 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m6wpm\" (UniqueName: \"kubernetes.io/projected/5aaec6a4-8da9-4ac8-a792-131254ad8e23-kube-api-access-m6wpm\") pod \"package-server-manager-789f6589d5-lzfdk\" (UID: \"5aaec6a4-8da9-4ac8-a792-131254ad8e23\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-lzfdk" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.490183 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/802caeaa-43a4-4cca-b946-5b561df185a8-node-pullsecrets\") pod \"apiserver-76f77b778f-rkmmm\" (UID: \"802caeaa-43a4-4cca-b946-5b561df185a8\") " pod="openshift-apiserver/apiserver-76f77b778f-rkmmm" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.490605 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a7c2774d-2d73-439d-94d7-6c184b05bf91-config\") pod \"openshift-apiserver-operator-796bbdcf4f-5p9tt\" (UID: \"a7c2774d-2d73-439d-94d7-6c184b05bf91\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-5p9tt" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.492088 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.492503 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/2ddecabf-87aa-4eda-93a5-eee5c61d3b91-registry-certificates\") pod \"image-registry-697d97f7c8-t6vlm\" (UID: \"2ddecabf-87aa-4eda-93a5-eee5c61d3b91\") " pod="openshift-image-registry/image-registry-697d97f7c8-t6vlm" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.492541 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c805746f-1e3e-488f-abda-4584a3028187-serving-cert\") pod \"authentication-operator-69f744f599-fv562\" (UID: \"c805746f-1e3e-488f-abda-4584a3028187\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-fv562" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.493177 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/802caeaa-43a4-4cca-b946-5b561df185a8-audit\") pod 
\"apiserver-76f77b778f-rkmmm\" (UID: \"802caeaa-43a4-4cca-b946-5b561df185a8\") " pod="openshift-apiserver/apiserver-76f77b778f-rkmmm" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.492619 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/802caeaa-43a4-4cca-b946-5b561df185a8-etcd-client\") pod \"apiserver-76f77b778f-rkmmm\" (UID: \"802caeaa-43a4-4cca-b946-5b561df185a8\") " pod="openshift-apiserver/apiserver-76f77b778f-rkmmm" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.494344 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/a6a84618-0674-4410-ab88-6896066b6077-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-654v6\" (UID: \"a6a84618-0674-4410-ab88-6896066b6077\") " pod="openshift-controller-manager/controller-manager-879f6c89f-654v6" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.488576 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/41c5f641-6c5c-4b3b-89d7-2a265cab36ca-audit-dir\") pod \"apiserver-7bbb656c7d-8kth8\" (UID: \"41c5f641-6c5c-4b3b-89d7-2a265cab36ca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8kth8" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.494606 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/41c5f641-6c5c-4b3b-89d7-2a265cab36ca-audit-policies\") pod \"apiserver-7bbb656c7d-8kth8\" (UID: \"41c5f641-6c5c-4b3b-89d7-2a265cab36ca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8kth8" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.495111 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/fdb216bf-a9cf-4f81-95ee-0424fc4bdc89-audit-policies\") pod \"oauth-openshift-558db77b4-xv8s4\" (UID: \"fdb216bf-a9cf-4f81-95ee-0424fc4bdc89\") " pod="openshift-authentication/oauth-openshift-558db77b4-xv8s4" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.495599 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/fdb216bf-a9cf-4f81-95ee-0424fc4bdc89-audit-dir\") pod \"oauth-openshift-558db77b4-xv8s4\" (UID: \"fdb216bf-a9cf-4f81-95ee-0424fc4bdc89\") " pod="openshift-authentication/oauth-openshift-558db77b4-xv8s4" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.496048 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/fdb216bf-a9cf-4f81-95ee-0424fc4bdc89-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-xv8s4\" (UID: \"fdb216bf-a9cf-4f81-95ee-0424fc4bdc89\") " pod="openshift-authentication/oauth-openshift-558db77b4-xv8s4" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.496479 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/41c5f641-6c5c-4b3b-89d7-2a265cab36ca-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-8kth8\" (UID: \"41c5f641-6c5c-4b3b-89d7-2a265cab36ca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8kth8" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.496574 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: 
\"kubernetes.io/configmap/802caeaa-43a4-4cca-b946-5b561df185a8-etcd-serving-ca\") pod \"apiserver-76f77b778f-rkmmm\" (UID: \"802caeaa-43a4-4cca-b946-5b561df185a8\") " pod="openshift-apiserver/apiserver-76f77b778f-rkmmm" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.496999 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a6a84618-0674-4410-ab88-6896066b6077-client-ca\") pod \"controller-manager-879f6c89f-654v6\" (UID: \"a6a84618-0674-4410-ab88-6896066b6077\") " pod="openshift-controller-manager/controller-manager-879f6c89f-654v6" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.497420 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a190f2de-91d0-40a2-b1cc-f9c191d56ce5-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-77qvr\" (UID: \"a190f2de-91d0-40a2-b1cc-f9c191d56ce5\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-77qvr" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.497885 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/2ddecabf-87aa-4eda-93a5-eee5c61d3b91-installation-pull-secrets\") pod \"image-registry-697d97f7c8-t6vlm\" (UID: \"2ddecabf-87aa-4eda-93a5-eee5c61d3b91\") " pod="openshift-image-registry/image-registry-697d97f7c8-t6vlm" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.498160 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/2ddecabf-87aa-4eda-93a5-eee5c61d3b91-ca-trust-extracted\") pod \"image-registry-697d97f7c8-t6vlm\" (UID: \"2ddecabf-87aa-4eda-93a5-eee5c61d3b91\") " pod="openshift-image-registry/image-registry-697d97f7c8-t6vlm" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.499041 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c805746f-1e3e-488f-abda-4584a3028187-service-ca-bundle\") pod \"authentication-operator-69f744f599-fv562\" (UID: \"c805746f-1e3e-488f-abda-4584a3028187\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-fv562" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.499556 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/fdb216bf-a9cf-4f81-95ee-0424fc4bdc89-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-xv8s4\" (UID: \"fdb216bf-a9cf-4f81-95ee-0424fc4bdc89\") " pod="openshift-authentication/oauth-openshift-558db77b4-xv8s4" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.499930 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/fdb216bf-a9cf-4f81-95ee-0424fc4bdc89-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-xv8s4\" (UID: \"fdb216bf-a9cf-4f81-95ee-0424fc4bdc89\") " pod="openshift-authentication/oauth-openshift-558db77b4-xv8s4" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.500150 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/802caeaa-43a4-4cca-b946-5b561df185a8-image-import-ca\") pod \"apiserver-76f77b778f-rkmmm\" (UID: \"802caeaa-43a4-4cca-b946-5b561df185a8\") " 
pod="openshift-apiserver/apiserver-76f77b778f-rkmmm" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.500192 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/fdb216bf-a9cf-4f81-95ee-0424fc4bdc89-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-xv8s4\" (UID: \"fdb216bf-a9cf-4f81-95ee-0424fc4bdc89\") " pod="openshift-authentication/oauth-openshift-558db77b4-xv8s4" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.503042 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/0118936f-47b4-4d58-956c-572d58803b3f-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-4ggnn\" (UID: \"0118936f-47b4-4d58-956c-572d58803b3f\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-4ggnn" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.505567 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/fdb216bf-a9cf-4f81-95ee-0424fc4bdc89-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-xv8s4\" (UID: \"fdb216bf-a9cf-4f81-95ee-0424fc4bdc89\") " pod="openshift-authentication/oauth-openshift-558db77b4-xv8s4" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.506101 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/41c5f641-6c5c-4b3b-89d7-2a265cab36ca-etcd-client\") pod \"apiserver-7bbb656c7d-8kth8\" (UID: \"41c5f641-6c5c-4b3b-89d7-2a265cab36ca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8kth8" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.506928 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/51bf4935-7bd2-4059-9869-60fc8db46d82-metrics-tls\") pod \"dns-operator-744455d44c-zjlzh\" (UID: \"51bf4935-7bd2-4059-9869-60fc8db46d82\") " pod="openshift-dns-operator/dns-operator-744455d44c-zjlzh" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.507112 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/04154cd6-a67c-42d3-bbb0-951c4986390d-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-rg4gb\" (UID: \"04154cd6-a67c-42d3-bbb0-951c4986390d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-rg4gb" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.508443 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/35c87388-97eb-44ff-91d6-6e9b9cfaa6a1-console-serving-cert\") pod \"console-f9d7485db-v6tz9\" (UID: \"35c87388-97eb-44ff-91d6-6e9b9cfaa6a1\") " pod="openshift-console/console-f9d7485db-v6tz9" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.509021 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/fdb216bf-a9cf-4f81-95ee-0424fc4bdc89-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-xv8s4\" (UID: \"fdb216bf-a9cf-4f81-95ee-0424fc4bdc89\") " pod="openshift-authentication/oauth-openshift-558db77b4-xv8s4" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.511147 4861 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/41c5f641-6c5c-4b3b-89d7-2a265cab36ca-encryption-config\") pod \"apiserver-7bbb656c7d-8kth8\" (UID: \"41c5f641-6c5c-4b3b-89d7-2a265cab36ca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8kth8" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.513100 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f279daa8-7a4e-405d-b7af-499a2179001f-serving-cert\") pod \"openshift-config-operator-7777fb866f-4znl7\" (UID: \"f279daa8-7a4e-405d-b7af-499a2179001f\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-4znl7" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.513211 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/3d27eb32-1a56-4d8c-9504-4b7fa8261df6-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-8tkv7\" (UID: \"3d27eb32-1a56-4d8c-9504-4b7fa8261df6\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-8tkv7" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.516535 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/fdb216bf-a9cf-4f81-95ee-0424fc4bdc89-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-xv8s4\" (UID: \"fdb216bf-a9cf-4f81-95ee-0424fc4bdc89\") " pod="openshift-authentication/oauth-openshift-558db77b4-xv8s4" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.516689 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/fdb216bf-a9cf-4f81-95ee-0424fc4bdc89-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-xv8s4\" (UID: \"fdb216bf-a9cf-4f81-95ee-0424fc4bdc89\") " pod="openshift-authentication/oauth-openshift-558db77b4-xv8s4" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.517665 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/35c87388-97eb-44ff-91d6-6e9b9cfaa6a1-console-oauth-config\") pod \"console-f9d7485db-v6tz9\" (UID: \"35c87388-97eb-44ff-91d6-6e9b9cfaa6a1\") " pod="openshift-console/console-f9d7485db-v6tz9" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.545049 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n8cpv\" (UniqueName: \"kubernetes.io/projected/a6a84618-0674-4410-ab88-6896066b6077-kube-api-access-n8cpv\") pod \"controller-manager-879f6c89f-654v6\" (UID: \"a6a84618-0674-4410-ab88-6896066b6077\") " pod="openshift-controller-manager/controller-manager-879f6c89f-654v6" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.561642 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hjn98\" (UniqueName: \"kubernetes.io/projected/2ddecabf-87aa-4eda-93a5-eee5c61d3b91-kube-api-access-hjn98\") pod \"image-registry-697d97f7c8-t6vlm\" (UID: \"2ddecabf-87aa-4eda-93a5-eee5c61d3b91\") " pod="openshift-image-registry/image-registry-697d97f7c8-t6vlm" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.582742 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/0118936f-47b4-4d58-956c-572d58803b3f-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-4ggnn\" (UID: 
\"0118936f-47b4-4d58-956c-572d58803b3f\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-4ggnn" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.593060 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/c4da994b-a677-44ec-a87a-5ae4fdd5e943-profile-collector-cert\") pod \"olm-operator-6b444d44fb-rjkcl\" (UID: \"c4da994b-a677-44ec-a87a-5ae4fdd5e943\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rjkcl" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.593108 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/5b53b21d-af9a-4c3a-9b32-30cc1de6cea1-images\") pod \"machine-config-operator-74547568cd-k46fg\" (UID: \"5b53b21d-af9a-4c3a-9b32-30cc1de6cea1\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-k46fg" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.593140 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/88db1fac-4c48-400a-9eee-f5c11d8dd12e-plugins-dir\") pod \"csi-hostpathplugin-kgpdn\" (UID: \"88db1fac-4c48-400a-9eee-f5c11d8dd12e\") " pod="hostpath-provisioner/csi-hostpathplugin-kgpdn" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.593160 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e0993744-e39f-4dbf-a726-7580528ca825-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-584sd\" (UID: \"e0993744-e39f-4dbf-a726-7580528ca825\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-584sd" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.593183 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5aad7266-a7c4-45fb-bbed-e1a1277681ef-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-xxf9s\" (UID: \"5aad7266-a7c4-45fb-bbed-e1a1277681ef\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-xxf9s" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.593206 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/5b4e4f2b-6854-4b02-9e1a-79aeff76d109-cert\") pod \"ingress-canary-5nfml\" (UID: \"5b4e4f2b-6854-4b02-9e1a-79aeff76d109\") " pod="openshift-ingress-canary/ingress-canary-5nfml" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.593246 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/6eb664e0-cd97-4267-a6bc-523316711be5-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-jkg6x\" (UID: \"6eb664e0-cd97-4267-a6bc-523316711be5\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-jkg6x" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.593269 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/9844df32-3946-46fe-90cb-0cc7fedd4af5-signing-key\") pod \"service-ca-9c57cc56f-6lkw8\" (UID: \"9844df32-3946-46fe-90cb-0cc7fedd4af5\") " pod="openshift-service-ca/service-ca-9c57cc56f-6lkw8" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.593289 
4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5f40fe6a-d0b3-41ee-8193-6ad449e3f8df-certs\") pod \"machine-config-server-q7c5v\" (UID: \"5f40fe6a-d0b3-41ee-8193-6ad449e3f8df\") " pod="openshift-machine-config-operator/machine-config-server-q7c5v" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.593310 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/f23bb6a0-acb7-4ada-96a2-73a978d75125-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-bj6mh\" (UID: \"f23bb6a0-acb7-4ada-96a2-73a978d75125\") " pod="openshift-marketplace/marketplace-operator-79b997595-bj6mh" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.593332 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/eb006550-0648-416e-8bb0-48a5baa22a11-serving-cert\") pod \"etcd-operator-b45778765-zpskq\" (UID: \"eb006550-0648-416e-8bb0-48a5baa22a11\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zpskq" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.593370 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/0c3c7089-d98e-4504-91c2-27851ed21d16-secret-volume\") pod \"collect-profiles-29324970-q4nf6\" (UID: \"0c3c7089-d98e-4504-91c2-27851ed21d16\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324970-q4nf6" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.593390 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5f40fe6a-d0b3-41ee-8193-6ad449e3f8df-node-bootstrap-token\") pod \"machine-config-server-q7c5v\" (UID: \"5f40fe6a-d0b3-41ee-8193-6ad449e3f8df\") " pod="openshift-machine-config-operator/machine-config-server-q7c5v" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.593409 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5aad7266-a7c4-45fb-bbed-e1a1277681ef-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-xxf9s\" (UID: \"5aad7266-a7c4-45fb-bbed-e1a1277681ef\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-xxf9s" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.593428 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/eb006550-0648-416e-8bb0-48a5baa22a11-etcd-service-ca\") pod \"etcd-operator-b45778765-zpskq\" (UID: \"eb006550-0648-416e-8bb0-48a5baa22a11\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zpskq" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.593485 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m6wpm\" (UniqueName: \"kubernetes.io/projected/5aaec6a4-8da9-4ac8-a792-131254ad8e23-kube-api-access-m6wpm\") pod \"package-server-manager-789f6589d5-lzfdk\" (UID: \"5aaec6a4-8da9-4ac8-a792-131254ad8e23\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-lzfdk" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.593507 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: 
\"kubernetes.io/configmap/0c3c7089-d98e-4504-91c2-27851ed21d16-config-volume\") pod \"collect-profiles-29324970-q4nf6\" (UID: \"0c3c7089-d98e-4504-91c2-27851ed21d16\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324970-q4nf6" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.593530 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qzw98\" (UniqueName: \"kubernetes.io/projected/5b4e4f2b-6854-4b02-9e1a-79aeff76d109-kube-api-access-qzw98\") pod \"ingress-canary-5nfml\" (UID: \"5b4e4f2b-6854-4b02-9e1a-79aeff76d109\") " pod="openshift-ingress-canary/ingress-canary-5nfml" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.593552 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b9spb\" (UniqueName: \"kubernetes.io/projected/dd1fcc3e-9164-4f36-8082-8458a06f6ce9-kube-api-access-b9spb\") pod \"router-default-5444994796-46jp8\" (UID: \"dd1fcc3e-9164-4f36-8082-8458a06f6ce9\") " pod="openshift-ingress/router-default-5444994796-46jp8" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.593585 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-slf47\" (UniqueName: \"kubernetes.io/projected/876d6207-8976-4d02-887b-b431a4821eab-kube-api-access-slf47\") pod \"control-plane-machine-set-operator-78cbb6b69f-fcq84\" (UID: \"876d6207-8976-4d02-887b-b431a4821eab\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-fcq84" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.593614 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k865v\" (UniqueName: \"kubernetes.io/projected/b546d058-2a13-4ed4-ad3b-061d9870f9ec-kube-api-access-k865v\") pod \"service-ca-operator-777779d784-c7d78\" (UID: \"b546d058-2a13-4ed4-ad3b-061d9870f9ec\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-c7d78" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.593643 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-64t7v\" (UniqueName: \"kubernetes.io/projected/9844df32-3946-46fe-90cb-0cc7fedd4af5-kube-api-access-64t7v\") pod \"service-ca-9c57cc56f-6lkw8\" (UID: \"9844df32-3946-46fe-90cb-0cc7fedd4af5\") " pod="openshift-service-ca/service-ca-9c57cc56f-6lkw8" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.593663 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/e98a1486-4c73-4895-923a-dc3b4dcead56-srv-cert\") pod \"catalog-operator-68c6474976-xt2zh\" (UID: \"e98a1486-4c73-4895-923a-dc3b4dcead56\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-xt2zh" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.593682 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xm2l4\" (UniqueName: \"kubernetes.io/projected/c4da994b-a677-44ec-a87a-5ae4fdd5e943-kube-api-access-xm2l4\") pod \"olm-operator-6b444d44fb-rjkcl\" (UID: \"c4da994b-a677-44ec-a87a-5ae4fdd5e943\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rjkcl" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.593701 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s9vds\" (UniqueName: \"kubernetes.io/projected/5f40fe6a-d0b3-41ee-8193-6ad449e3f8df-kube-api-access-s9vds\") pod \"machine-config-server-q7c5v\" (UID: 
\"5f40fe6a-d0b3-41ee-8193-6ad449e3f8df\") " pod="openshift-machine-config-operator/machine-config-server-q7c5v" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.593730 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b546d058-2a13-4ed4-ad3b-061d9870f9ec-config\") pod \"service-ca-operator-777779d784-c7d78\" (UID: \"b546d058-2a13-4ed4-ad3b-061d9870f9ec\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-c7d78" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.593749 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/5e6df402-f20e-4982-810e-5664640bb0ea-tmpfs\") pod \"packageserver-d55dfcdfc-nzdn9\" (UID: \"5e6df402-f20e-4982-810e-5664640bb0ea\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-nzdn9" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.593768 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/39d336a2-fdb0-4500-a751-b771672ae2bd-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-f9q2n\" (UID: \"39d336a2-fdb0-4500-a751-b771672ae2bd\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-f9q2n" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.593790 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/b430fcf9-3c59-4aa8-b554-9e85cf2fcb40-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-s84lp\" (UID: \"b430fcf9-3c59-4aa8-b554-9e85cf2fcb40\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-s84lp" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.593810 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vgl9f\" (UniqueName: \"kubernetes.io/projected/eb006550-0648-416e-8bb0-48a5baa22a11-kube-api-access-vgl9f\") pod \"etcd-operator-b45778765-zpskq\" (UID: \"eb006550-0648-416e-8bb0-48a5baa22a11\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zpskq" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.593833 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/88db1fac-4c48-400a-9eee-f5c11d8dd12e-mountpoint-dir\") pod \"csi-hostpathplugin-kgpdn\" (UID: \"88db1fac-4c48-400a-9eee-f5c11d8dd12e\") " pod="hostpath-provisioner/csi-hostpathplugin-kgpdn" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.593852 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/c4da994b-a677-44ec-a87a-5ae4fdd5e943-srv-cert\") pod \"olm-operator-6b444d44fb-rjkcl\" (UID: \"c4da994b-a677-44ec-a87a-5ae4fdd5e943\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rjkcl" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.593872 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/eb006550-0648-416e-8bb0-48a5baa22a11-etcd-client\") pod \"etcd-operator-b45778765-zpskq\" (UID: \"eb006550-0648-416e-8bb0-48a5baa22a11\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zpskq" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.593895 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-dmx9k\" (UniqueName: \"kubernetes.io/projected/88db1fac-4c48-400a-9eee-f5c11d8dd12e-kube-api-access-dmx9k\") pod \"csi-hostpathplugin-kgpdn\" (UID: \"88db1fac-4c48-400a-9eee-f5c11d8dd12e\") " pod="hostpath-provisioner/csi-hostpathplugin-kgpdn" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.593919 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/6eb664e0-cd97-4267-a6bc-523316711be5-proxy-tls\") pod \"machine-config-controller-84d6567774-jkg6x\" (UID: \"6eb664e0-cd97-4267-a6bc-523316711be5\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-jkg6x" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.593944 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zpgc8\" (UniqueName: \"kubernetes.io/projected/6eb664e0-cd97-4267-a6bc-523316711be5-kube-api-access-zpgc8\") pod \"machine-config-controller-84d6567774-jkg6x\" (UID: \"6eb664e0-cd97-4267-a6bc-523316711be5\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-jkg6x" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.593968 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/5b53b21d-af9a-4c3a-9b32-30cc1de6cea1-auth-proxy-config\") pod \"machine-config-operator-74547568cd-k46fg\" (UID: \"5b53b21d-af9a-4c3a-9b32-30cc1de6cea1\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-k46fg" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.593998 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mkvqc\" (UniqueName: \"kubernetes.io/projected/e98a1486-4c73-4895-923a-dc3b4dcead56-kube-api-access-mkvqc\") pod \"catalog-operator-68c6474976-xt2zh\" (UID: \"e98a1486-4c73-4895-923a-dc3b4dcead56\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-xt2zh" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.594028 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bfdmx\" (UniqueName: \"kubernetes.io/projected/b430fcf9-3c59-4aa8-b554-9e85cf2fcb40-kube-api-access-bfdmx\") pod \"multus-admission-controller-857f4d67dd-s84lp\" (UID: \"b430fcf9-3c59-4aa8-b554-9e85cf2fcb40\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-s84lp" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.594055 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/e98a1486-4c73-4895-923a-dc3b4dcead56-profile-collector-cert\") pod \"catalog-operator-68c6474976-xt2zh\" (UID: \"e98a1486-4c73-4895-923a-dc3b4dcead56\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-xt2zh" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.594577 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-654v6" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.594965 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/88db1fac-4c48-400a-9eee-f5c11d8dd12e-plugins-dir\") pod \"csi-hostpathplugin-kgpdn\" (UID: \"88db1fac-4c48-400a-9eee-f5c11d8dd12e\") " pod="hostpath-provisioner/csi-hostpathplugin-kgpdn" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.594082 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/16af6fa6-f789-48cd-8ec1-b84f169fe3a9-metrics-tls\") pod \"dns-default-6gqgm\" (UID: \"16af6fa6-f789-48cd-8ec1-b84f169fe3a9\") " pod="openshift-dns/dns-default-6gqgm" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.596176 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e0993744-e39f-4dbf-a726-7580528ca825-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-584sd\" (UID: \"e0993744-e39f-4dbf-a726-7580528ca825\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-584sd" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.596308 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/39d336a2-fdb0-4500-a751-b771672ae2bd-config\") pod \"kube-apiserver-operator-766d6c64bb-f9q2n\" (UID: \"39d336a2-fdb0-4500-a751-b771672ae2bd\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-f9q2n" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.596410 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vhpkc\" (UniqueName: \"kubernetes.io/projected/5b53b21d-af9a-4c3a-9b32-30cc1de6cea1-kube-api-access-vhpkc\") pod \"machine-config-operator-74547568cd-k46fg\" (UID: \"5b53b21d-af9a-4c3a-9b32-30cc1de6cea1\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-k46fg" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.596520 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b546d058-2a13-4ed4-ad3b-061d9870f9ec-serving-cert\") pod \"service-ca-operator-777779d784-c7d78\" (UID: \"b546d058-2a13-4ed4-ad3b-061d9870f9ec\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-c7d78" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.596622 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-t6vlm\" (UID: \"2ddecabf-87aa-4eda-93a5-eee5c61d3b91\") " pod="openshift-image-registry/image-registry-697d97f7c8-t6vlm" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.596736 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/39d336a2-fdb0-4500-a751-b771672ae2bd-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-f9q2n\" (UID: \"39d336a2-fdb0-4500-a751-b771672ae2bd\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-f9q2n" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.596840 4861 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h7wl5\" (UniqueName: \"kubernetes.io/projected/16af6fa6-f789-48cd-8ec1-b84f169fe3a9-kube-api-access-h7wl5\") pod \"dns-default-6gqgm\" (UID: \"16af6fa6-f789-48cd-8ec1-b84f169fe3a9\") " pod="openshift-dns/dns-default-6gqgm" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.596927 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/5b53b21d-af9a-4c3a-9b32-30cc1de6cea1-images\") pod \"machine-config-operator-74547568cd-k46fg\" (UID: \"5b53b21d-af9a-4c3a-9b32-30cc1de6cea1\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-k46fg" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.596930 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/88db1fac-4c48-400a-9eee-f5c11d8dd12e-csi-data-dir\") pod \"csi-hostpathplugin-kgpdn\" (UID: \"88db1fac-4c48-400a-9eee-f5c11d8dd12e\") " pod="hostpath-provisioner/csi-hostpathplugin-kgpdn" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.597019 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/dd1fcc3e-9164-4f36-8082-8458a06f6ce9-service-ca-bundle\") pod \"router-default-5444994796-46jp8\" (UID: \"dd1fcc3e-9164-4f36-8082-8458a06f6ce9\") " pod="openshift-ingress/router-default-5444994796-46jp8" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.597068 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/dd1fcc3e-9164-4f36-8082-8458a06f6ce9-default-certificate\") pod \"router-default-5444994796-46jp8\" (UID: \"dd1fcc3e-9164-4f36-8082-8458a06f6ce9\") " pod="openshift-ingress/router-default-5444994796-46jp8" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.597086 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zjgz7\" (UniqueName: \"kubernetes.io/projected/f23bb6a0-acb7-4ada-96a2-73a978d75125-kube-api-access-zjgz7\") pod \"marketplace-operator-79b997595-bj6mh\" (UID: \"f23bb6a0-acb7-4ada-96a2-73a978d75125\") " pod="openshift-marketplace/marketplace-operator-79b997595-bj6mh" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.597125 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5aad7266-a7c4-45fb-bbed-e1a1277681ef-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-xxf9s\" (UID: \"5aad7266-a7c4-45fb-bbed-e1a1277681ef\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-xxf9s" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.597141 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/eb006550-0648-416e-8bb0-48a5baa22a11-etcd-ca\") pod \"etcd-operator-b45778765-zpskq\" (UID: \"eb006550-0648-416e-8bb0-48a5baa22a11\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zpskq" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.597161 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/876d6207-8976-4d02-887b-b431a4821eab-control-plane-machine-set-operator-tls\") pod 
\"control-plane-machine-set-operator-78cbb6b69f-fcq84\" (UID: \"876d6207-8976-4d02-887b-b431a4821eab\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-fcq84" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.597181 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/88db1fac-4c48-400a-9eee-f5c11d8dd12e-registration-dir\") pod \"csi-hostpathplugin-kgpdn\" (UID: \"88db1fac-4c48-400a-9eee-f5c11d8dd12e\") " pod="hostpath-provisioner/csi-hostpathplugin-kgpdn" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.597199 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4vdwf\" (UniqueName: \"kubernetes.io/projected/5e6df402-f20e-4982-810e-5664640bb0ea-kube-api-access-4vdwf\") pod \"packageserver-d55dfcdfc-nzdn9\" (UID: \"5e6df402-f20e-4982-810e-5664640bb0ea\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-nzdn9" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.597214 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/88db1fac-4c48-400a-9eee-f5c11d8dd12e-socket-dir\") pod \"csi-hostpathplugin-kgpdn\" (UID: \"88db1fac-4c48-400a-9eee-f5c11d8dd12e\") " pod="hostpath-provisioner/csi-hostpathplugin-kgpdn" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.597249 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4vr65\" (UniqueName: \"kubernetes.io/projected/b35950ee-9000-4269-a58d-f1d2c4563f05-kube-api-access-4vr65\") pod \"downloads-7954f5f757-rc9jz\" (UID: \"b35950ee-9000-4269-a58d-f1d2c4563f05\") " pod="openshift-console/downloads-7954f5f757-rc9jz" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.597285 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/5aaec6a4-8da9-4ac8-a792-131254ad8e23-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-lzfdk\" (UID: \"5aaec6a4-8da9-4ac8-a792-131254ad8e23\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-lzfdk" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.597304 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2j4cz\" (UniqueName: \"kubernetes.io/projected/217fa116-2a2c-4c70-a13c-370fd7c2ffd7-kube-api-access-2j4cz\") pod \"migrator-59844c95c7-t47dt\" (UID: \"217fa116-2a2c-4c70-a13c-370fd7c2ffd7\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-t47dt" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.597320 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/5e6df402-f20e-4982-810e-5664640bb0ea-webhook-cert\") pod \"packageserver-d55dfcdfc-nzdn9\" (UID: \"5e6df402-f20e-4982-810e-5664640bb0ea\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-nzdn9" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.597350 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f23bb6a0-acb7-4ada-96a2-73a978d75125-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-bj6mh\" (UID: \"f23bb6a0-acb7-4ada-96a2-73a978d75125\") " pod="openshift-marketplace/marketplace-operator-79b997595-bj6mh" 
Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.597370 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eb006550-0648-416e-8bb0-48a5baa22a11-config\") pod \"etcd-operator-b45778765-zpskq\" (UID: \"eb006550-0648-416e-8bb0-48a5baa22a11\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zpskq" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.597388 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tc2sr\" (UniqueName: \"kubernetes.io/projected/0c3c7089-d98e-4504-91c2-27851ed21d16-kube-api-access-tc2sr\") pod \"collect-profiles-29324970-q4nf6\" (UID: \"0c3c7089-d98e-4504-91c2-27851ed21d16\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324970-q4nf6" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.597410 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/16af6fa6-f789-48cd-8ec1-b84f169fe3a9-config-volume\") pod \"dns-default-6gqgm\" (UID: \"16af6fa6-f789-48cd-8ec1-b84f169fe3a9\") " pod="openshift-dns/dns-default-6gqgm" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.597425 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/dd1fcc3e-9164-4f36-8082-8458a06f6ce9-stats-auth\") pod \"router-default-5444994796-46jp8\" (UID: \"dd1fcc3e-9164-4f36-8082-8458a06f6ce9\") " pod="openshift-ingress/router-default-5444994796-46jp8" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.597745 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/6eb664e0-cd97-4267-a6bc-523316711be5-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-jkg6x\" (UID: \"6eb664e0-cd97-4267-a6bc-523316711be5\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-jkg6x" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.598177 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/88db1fac-4c48-400a-9eee-f5c11d8dd12e-csi-data-dir\") pod \"csi-hostpathplugin-kgpdn\" (UID: \"88db1fac-4c48-400a-9eee-f5c11d8dd12e\") " pod="hostpath-provisioner/csi-hostpathplugin-kgpdn" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.598199 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e0993744-e39f-4dbf-a726-7580528ca825-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-584sd\" (UID: \"e0993744-e39f-4dbf-a726-7580528ca825\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-584sd" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.600523 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/b430fcf9-3c59-4aa8-b554-9e85cf2fcb40-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-s84lp\" (UID: \"b430fcf9-3c59-4aa8-b554-9e85cf2fcb40\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-s84lp" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.601333 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/c4da994b-a677-44ec-a87a-5ae4fdd5e943-profile-collector-cert\") pod 
\"olm-operator-6b444d44fb-rjkcl\" (UID: \"c4da994b-a677-44ec-a87a-5ae4fdd5e943\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rjkcl" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.601501 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/88db1fac-4c48-400a-9eee-f5c11d8dd12e-socket-dir\") pod \"csi-hostpathplugin-kgpdn\" (UID: \"88db1fac-4c48-400a-9eee-f5c11d8dd12e\") " pod="hostpath-provisioner/csi-hostpathplugin-kgpdn" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.602297 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/dd1fcc3e-9164-4f36-8082-8458a06f6ce9-service-ca-bundle\") pod \"router-default-5444994796-46jp8\" (UID: \"dd1fcc3e-9164-4f36-8082-8458a06f6ce9\") " pod="openshift-ingress/router-default-5444994796-46jp8" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.602300 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/dd1fcc3e-9164-4f36-8082-8458a06f6ce9-metrics-certs\") pod \"router-default-5444994796-46jp8\" (UID: \"dd1fcc3e-9164-4f36-8082-8458a06f6ce9\") " pod="openshift-ingress/router-default-5444994796-46jp8" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.602365 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/5b53b21d-af9a-4c3a-9b32-30cc1de6cea1-proxy-tls\") pod \"machine-config-operator-74547568cd-k46fg\" (UID: \"5b53b21d-af9a-4c3a-9b32-30cc1de6cea1\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-k46fg" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.602400 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d7jgz\" (UniqueName: \"kubernetes.io/projected/e0993744-e39f-4dbf-a726-7580528ca825-kube-api-access-d7jgz\") pod \"kube-storage-version-migrator-operator-b67b599dd-584sd\" (UID: \"e0993744-e39f-4dbf-a726-7580528ca825\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-584sd" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.602440 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/5e6df402-f20e-4982-810e-5664640bb0ea-apiservice-cert\") pod \"packageserver-d55dfcdfc-nzdn9\" (UID: \"5e6df402-f20e-4982-810e-5664640bb0ea\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-nzdn9" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.602474 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/9844df32-3946-46fe-90cb-0cc7fedd4af5-signing-cabundle\") pod \"service-ca-9c57cc56f-6lkw8\" (UID: \"9844df32-3946-46fe-90cb-0cc7fedd4af5\") " pod="openshift-service-ca/service-ca-9c57cc56f-6lkw8" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.602836 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e0993744-e39f-4dbf-a726-7580528ca825-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-584sd\" (UID: \"e0993744-e39f-4dbf-a726-7580528ca825\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-584sd" Oct 03 13:32:57 crc 
kubenswrapper[4861]: I1003 13:32:57.603579 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/39d336a2-fdb0-4500-a751-b771672ae2bd-config\") pod \"kube-apiserver-operator-766d6c64bb-f9q2n\" (UID: \"39d336a2-fdb0-4500-a751-b771672ae2bd\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-f9q2n" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.606564 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/c4da994b-a677-44ec-a87a-5ae4fdd5e943-srv-cert\") pod \"olm-operator-6b444d44fb-rjkcl\" (UID: \"c4da994b-a677-44ec-a87a-5ae4fdd5e943\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rjkcl" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.607737 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/9844df32-3946-46fe-90cb-0cc7fedd4af5-signing-cabundle\") pod \"service-ca-9c57cc56f-6lkw8\" (UID: \"9844df32-3946-46fe-90cb-0cc7fedd4af5\") " pod="openshift-service-ca/service-ca-9c57cc56f-6lkw8" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.607832 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/88db1fac-4c48-400a-9eee-f5c11d8dd12e-registration-dir\") pod \"csi-hostpathplugin-kgpdn\" (UID: \"88db1fac-4c48-400a-9eee-f5c11d8dd12e\") " pod="hostpath-provisioner/csi-hostpathplugin-kgpdn" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.608033 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0c3c7089-d98e-4504-91c2-27851ed21d16-config-volume\") pod \"collect-profiles-29324970-q4nf6\" (UID: \"0c3c7089-d98e-4504-91c2-27851ed21d16\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324970-q4nf6" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.608968 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b546d058-2a13-4ed4-ad3b-061d9870f9ec-serving-cert\") pod \"service-ca-operator-777779d784-c7d78\" (UID: \"b546d058-2a13-4ed4-ad3b-061d9870f9ec\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-c7d78" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.609905 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/e98a1486-4c73-4895-923a-dc3b4dcead56-srv-cert\") pod \"catalog-operator-68c6474976-xt2zh\" (UID: \"e98a1486-4c73-4895-923a-dc3b4dcead56\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-xt2zh" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.610569 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b546d058-2a13-4ed4-ad3b-061d9870f9ec-config\") pod \"service-ca-operator-777779d784-c7d78\" (UID: \"b546d058-2a13-4ed4-ad3b-061d9870f9ec\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-c7d78" Oct 03 13:32:57 crc kubenswrapper[4861]: E1003 13:32:57.610891 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 13:32:58.110877211 +0000 UTC m=+92.108862258 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-t6vlm" (UID: "2ddecabf-87aa-4eda-93a5-eee5c61d3b91") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.610895 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/5b53b21d-af9a-4c3a-9b32-30cc1de6cea1-proxy-tls\") pod \"machine-config-operator-74547568cd-k46fg\" (UID: \"5b53b21d-af9a-4c3a-9b32-30cc1de6cea1\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-k46fg" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.610951 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/5e6df402-f20e-4982-810e-5664640bb0ea-tmpfs\") pod \"packageserver-d55dfcdfc-nzdn9\" (UID: \"5e6df402-f20e-4982-810e-5664640bb0ea\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-nzdn9" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.611127 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/5aaec6a4-8da9-4ac8-a792-131254ad8e23-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-lzfdk\" (UID: \"5aaec6a4-8da9-4ac8-a792-131254ad8e23\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-lzfdk" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.611613 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/dd1fcc3e-9164-4f36-8082-8458a06f6ce9-metrics-certs\") pod \"router-default-5444994796-46jp8\" (UID: \"dd1fcc3e-9164-4f36-8082-8458a06f6ce9\") " pod="openshift-ingress/router-default-5444994796-46jp8" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.607748 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/5b4e4f2b-6854-4b02-9e1a-79aeff76d109-cert\") pod \"ingress-canary-5nfml\" (UID: \"5b4e4f2b-6854-4b02-9e1a-79aeff76d109\") " pod="openshift-ingress-canary/ingress-canary-5nfml" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.611792 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5aad7266-a7c4-45fb-bbed-e1a1277681ef-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-xxf9s\" (UID: \"5aad7266-a7c4-45fb-bbed-e1a1277681ef\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-xxf9s" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.612357 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/9844df32-3946-46fe-90cb-0cc7fedd4af5-signing-key\") pod \"service-ca-9c57cc56f-6lkw8\" (UID: \"9844df32-3946-46fe-90cb-0cc7fedd4af5\") " pod="openshift-service-ca/service-ca-9c57cc56f-6lkw8" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.613661 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/16af6fa6-f789-48cd-8ec1-b84f169fe3a9-config-volume\") pod 
\"dns-default-6gqgm\" (UID: \"16af6fa6-f789-48cd-8ec1-b84f169fe3a9\") " pod="openshift-dns/dns-default-6gqgm" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.613864 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/88db1fac-4c48-400a-9eee-f5c11d8dd12e-mountpoint-dir\") pod \"csi-hostpathplugin-kgpdn\" (UID: \"88db1fac-4c48-400a-9eee-f5c11d8dd12e\") " pod="hostpath-provisioner/csi-hostpathplugin-kgpdn" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.614691 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/5b53b21d-af9a-4c3a-9b32-30cc1de6cea1-auth-proxy-config\") pod \"machine-config-operator-74547568cd-k46fg\" (UID: \"5b53b21d-af9a-4c3a-9b32-30cc1de6cea1\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-k46fg" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.614998 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/dd1fcc3e-9164-4f36-8082-8458a06f6ce9-default-certificate\") pod \"router-default-5444994796-46jp8\" (UID: \"dd1fcc3e-9164-4f36-8082-8458a06f6ce9\") " pod="openshift-ingress/router-default-5444994796-46jp8" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.615293 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/eb006550-0648-416e-8bb0-48a5baa22a11-etcd-ca\") pod \"etcd-operator-b45778765-zpskq\" (UID: \"eb006550-0648-416e-8bb0-48a5baa22a11\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zpskq" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.615460 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f23bb6a0-acb7-4ada-96a2-73a978d75125-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-bj6mh\" (UID: \"f23bb6a0-acb7-4ada-96a2-73a978d75125\") " pod="openshift-marketplace/marketplace-operator-79b997595-bj6mh" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.616192 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eb006550-0648-416e-8bb0-48a5baa22a11-config\") pod \"etcd-operator-b45778765-zpskq\" (UID: \"eb006550-0648-416e-8bb0-48a5baa22a11\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zpskq" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.617020 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/39d336a2-fdb0-4500-a751-b771672ae2bd-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-f9q2n\" (UID: \"39d336a2-fdb0-4500-a751-b771672ae2bd\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-f9q2n" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.618065 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/6eb664e0-cd97-4267-a6bc-523316711be5-proxy-tls\") pod \"machine-config-controller-84d6567774-jkg6x\" (UID: \"6eb664e0-cd97-4267-a6bc-523316711be5\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-jkg6x" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.618102 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: 
\"kubernetes.io/secret/eb006550-0648-416e-8bb0-48a5baa22a11-etcd-client\") pod \"etcd-operator-b45778765-zpskq\" (UID: \"eb006550-0648-416e-8bb0-48a5baa22a11\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zpskq" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.618696 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/16af6fa6-f789-48cd-8ec1-b84f169fe3a9-metrics-tls\") pod \"dns-default-6gqgm\" (UID: \"16af6fa6-f789-48cd-8ec1-b84f169fe3a9\") " pod="openshift-dns/dns-default-6gqgm" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.618887 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5f40fe6a-d0b3-41ee-8193-6ad449e3f8df-node-bootstrap-token\") pod \"machine-config-server-q7c5v\" (UID: \"5f40fe6a-d0b3-41ee-8193-6ad449e3f8df\") " pod="openshift-machine-config-operator/machine-config-server-q7c5v" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.619065 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/5e6df402-f20e-4982-810e-5664640bb0ea-apiservice-cert\") pod \"packageserver-d55dfcdfc-nzdn9\" (UID: \"5e6df402-f20e-4982-810e-5664640bb0ea\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-nzdn9" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.619382 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5aad7266-a7c4-45fb-bbed-e1a1277681ef-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-xxf9s\" (UID: \"5aad7266-a7c4-45fb-bbed-e1a1277681ef\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-xxf9s" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.619562 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/eb006550-0648-416e-8bb0-48a5baa22a11-etcd-service-ca\") pod \"etcd-operator-b45778765-zpskq\" (UID: \"eb006550-0648-416e-8bb0-48a5baa22a11\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zpskq" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.619699 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/eb006550-0648-416e-8bb0-48a5baa22a11-serving-cert\") pod \"etcd-operator-b45778765-zpskq\" (UID: \"eb006550-0648-416e-8bb0-48a5baa22a11\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zpskq" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.621552 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/0c3c7089-d98e-4504-91c2-27851ed21d16-secret-volume\") pod \"collect-profiles-29324970-q4nf6\" (UID: \"0c3c7089-d98e-4504-91c2-27851ed21d16\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324970-q4nf6" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.624064 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/5e6df402-f20e-4982-810e-5664640bb0ea-webhook-cert\") pod \"packageserver-d55dfcdfc-nzdn9\" (UID: \"5e6df402-f20e-4982-810e-5664640bb0ea\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-nzdn9" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.624058 4861 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-4z8ps\" (UniqueName: \"kubernetes.io/projected/0118936f-47b4-4d58-956c-572d58803b3f-kube-api-access-4z8ps\") pod \"cluster-image-registry-operator-dc59b4c8b-4ggnn\" (UID: \"0118936f-47b4-4d58-956c-572d58803b3f\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-4ggnn" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.625564 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/e98a1486-4c73-4895-923a-dc3b4dcead56-profile-collector-cert\") pod \"catalog-operator-68c6474976-xt2zh\" (UID: \"e98a1486-4c73-4895-923a-dc3b4dcead56\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-xt2zh" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.627100 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/dd1fcc3e-9164-4f36-8082-8458a06f6ce9-stats-auth\") pod \"router-default-5444994796-46jp8\" (UID: \"dd1fcc3e-9164-4f36-8082-8458a06f6ce9\") " pod="openshift-ingress/router-default-5444994796-46jp8" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.627379 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5f40fe6a-d0b3-41ee-8193-6ad449e3f8df-certs\") pod \"machine-config-server-q7c5v\" (UID: \"5f40fe6a-d0b3-41ee-8193-6ad449e3f8df\") " pod="openshift-machine-config-operator/machine-config-server-q7c5v" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.628916 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vn22z\" (UniqueName: \"kubernetes.io/projected/f279daa8-7a4e-405d-b7af-499a2179001f-kube-api-access-vn22z\") pod \"openshift-config-operator-7777fb866f-4znl7\" (UID: \"f279daa8-7a4e-405d-b7af-499a2179001f\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-4znl7" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.630672 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/876d6207-8976-4d02-887b-b431a4821eab-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-fcq84\" (UID: \"876d6207-8976-4d02-887b-b431a4821eab\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-fcq84" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.635866 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/f23bb6a0-acb7-4ada-96a2-73a978d75125-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-bj6mh\" (UID: \"f23bb6a0-acb7-4ada-96a2-73a978d75125\") " pod="openshift-marketplace/marketplace-operator-79b997595-bj6mh" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.640073 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/3bf6ab88-b524-4a66-9fff-f873646d11d9-bound-sa-token\") pod \"ingress-operator-5b745b69d9-bbq9v\" (UID: \"3bf6ab88-b524-4a66-9fff-f873646d11d9\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-bbq9v" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.662580 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gpnnd\" (UniqueName: 
\"kubernetes.io/projected/fdb216bf-a9cf-4f81-95ee-0424fc4bdc89-kube-api-access-gpnnd\") pod \"oauth-openshift-558db77b4-xv8s4\" (UID: \"fdb216bf-a9cf-4f81-95ee-0424fc4bdc89\") " pod="openshift-authentication/oauth-openshift-558db77b4-xv8s4" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.665963 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-4znl7" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.678817 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-4ggnn" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.684627 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5525z\" (UniqueName: \"kubernetes.io/projected/802caeaa-43a4-4cca-b946-5b561df185a8-kube-api-access-5525z\") pod \"apiserver-76f77b778f-rkmmm\" (UID: \"802caeaa-43a4-4cca-b946-5b561df185a8\") " pod="openshift-apiserver/apiserver-76f77b778f-rkmmm" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.702851 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m8qdq\" (UniqueName: \"kubernetes.io/projected/b06fff6c-bfa9-4b23-81d4-7cfc8ec52173-kube-api-access-m8qdq\") pod \"openshift-controller-manager-operator-756b6f6bc6-r4wck\" (UID: \"b06fff6c-bfa9-4b23-81d4-7cfc8ec52173\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-r4wck" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.703599 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 13:32:57 crc kubenswrapper[4861]: E1003 13:32:57.703705 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 13:32:58.203686664 +0000 UTC m=+92.201671711 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.704491 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-t6vlm\" (UID: \"2ddecabf-87aa-4eda-93a5-eee5c61d3b91\") " pod="openshift-image-registry/image-registry-697d97f7c8-t6vlm" Oct 03 13:32:57 crc kubenswrapper[4861]: E1003 13:32:57.704969 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2025-10-03 13:32:58.204950826 +0000 UTC m=+92.202935873 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-t6vlm" (UID: "2ddecabf-87aa-4eda-93a5-eee5c61d3b91") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.724389 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x4fcl\" (UniqueName: \"kubernetes.io/projected/41c5f641-6c5c-4b3b-89d7-2a265cab36ca-kube-api-access-x4fcl\") pod \"apiserver-7bbb656c7d-8kth8\" (UID: \"41c5f641-6c5c-4b3b-89d7-2a265cab36ca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8kth8" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.741095 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pdfw2\" (UniqueName: \"kubernetes.io/projected/3d27eb32-1a56-4d8c-9504-4b7fa8261df6-kube-api-access-pdfw2\") pod \"cluster-samples-operator-665b6dd947-8tkv7\" (UID: \"3d27eb32-1a56-4d8c-9504-4b7fa8261df6\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-8tkv7" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.750222 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-8tkv7" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.764044 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dzhhp\" (UniqueName: \"kubernetes.io/projected/51bf4935-7bd2-4059-9869-60fc8db46d82-kube-api-access-dzhhp\") pod \"dns-operator-744455d44c-zjlzh\" (UID: \"51bf4935-7bd2-4059-9869-60fc8db46d82\") " pod="openshift-dns-operator/dns-operator-744455d44c-zjlzh" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.796937 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lhtgw\" (UniqueName: \"kubernetes.io/projected/3bf6ab88-b524-4a66-9fff-f873646d11d9-kube-api-access-lhtgw\") pod \"ingress-operator-5b745b69d9-bbq9v\" (UID: \"3bf6ab88-b524-4a66-9fff-f873646d11d9\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-bbq9v" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.801465 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-r4wck" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.808287 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-654v6"] Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.808776 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 13:32:57 crc kubenswrapper[4861]: E1003 13:32:57.810061 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 13:32:58.310042753 +0000 UTC m=+92.308027800 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.811214 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/2ddecabf-87aa-4eda-93a5-eee5c61d3b91-bound-sa-token\") pod \"image-registry-697d97f7c8-t6vlm\" (UID: \"2ddecabf-87aa-4eda-93a5-eee5c61d3b91\") " pod="openshift-image-registry/image-registry-697d97f7c8-t6vlm" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.825383 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-zjlzh" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.832300 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jskrd\" (UniqueName: \"kubernetes.io/projected/c805746f-1e3e-488f-abda-4584a3028187-kube-api-access-jskrd\") pod \"authentication-operator-69f744f599-fv562\" (UID: \"c805746f-1e3e-488f-abda-4584a3028187\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-fv562" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.839142 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-rkmmm" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.848943 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-fv562" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.849333 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a190f2de-91d0-40a2-b1cc-f9c191d56ce5-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-77qvr\" (UID: \"a190f2de-91d0-40a2-b1cc-f9c191d56ce5\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-77qvr" Oct 03 13:32:57 crc kubenswrapper[4861]: W1003 13:32:57.853808 4861 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda6a84618_0674_4410_ab88_6896066b6077.slice/crio-a154c518d8f13f980c687014a54da3cc027487c02e5bd17ed36aab394cfff116 WatchSource:0}: Error finding container a154c518d8f13f980c687014a54da3cc027487c02e5bd17ed36aab394cfff116: Status 404 returned error can't find the container with id a154c518d8f13f980c687014a54da3cc027487c02e5bd17ed36aab394cfff116 Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.862787 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8kth8" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.864802 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cszvg\" (UniqueName: \"kubernetes.io/projected/a7c2774d-2d73-439d-94d7-6c184b05bf91-kube-api-access-cszvg\") pod \"openshift-apiserver-operator-796bbdcf4f-5p9tt\" (UID: \"a7c2774d-2d73-439d-94d7-6c184b05bf91\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-5p9tt" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.869955 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-5p9tt" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.882083 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-xv8s4" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.885994 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-77qvr" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.900448 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-bbq9v" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.902176 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-49pjb\" (UniqueName: \"kubernetes.io/projected/35c87388-97eb-44ff-91d6-6e9b9cfaa6a1-kube-api-access-49pjb\") pod \"console-f9d7485db-v6tz9\" (UID: \"35c87388-97eb-44ff-91d6-6e9b9cfaa6a1\") " pod="openshift-console/console-f9d7485db-v6tz9" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.909285 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-4znl7"] Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.911847 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-t6vlm\" (UID: \"2ddecabf-87aa-4eda-93a5-eee5c61d3b91\") " pod="openshift-image-registry/image-registry-697d97f7c8-t6vlm" Oct 03 13:32:57 crc kubenswrapper[4861]: E1003 13:32:57.912296 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 13:32:58.412285427 +0000 UTC m=+92.410270474 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-t6vlm" (UID: "2ddecabf-87aa-4eda-93a5-eee5c61d3b91") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.917330 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t7hbj\" (UniqueName: \"kubernetes.io/projected/04154cd6-a67c-42d3-bbb0-951c4986390d-kube-api-access-t7hbj\") pod \"machine-api-operator-5694c8668f-rg4gb\" (UID: \"04154cd6-a67c-42d3-bbb0-951c4986390d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-rg4gb" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.950266 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5aad7266-a7c4-45fb-bbed-e1a1277681ef-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-xxf9s\" (UID: \"5aad7266-a7c4-45fb-bbed-e1a1277681ef\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-xxf9s" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.975068 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-xxf9s" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.976815 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-64t7v\" (UniqueName: \"kubernetes.io/projected/9844df32-3946-46fe-90cb-0cc7fedd4af5-kube-api-access-64t7v\") pod \"service-ca-9c57cc56f-6lkw8\" (UID: \"9844df32-3946-46fe-90cb-0cc7fedd4af5\") " pod="openshift-service-ca/service-ca-9c57cc56f-6lkw8" Oct 03 13:32:57 crc kubenswrapper[4861]: I1003 13:32:57.988858 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4vr65\" (UniqueName: \"kubernetes.io/projected/b35950ee-9000-4269-a58d-f1d2c4563f05-kube-api-access-4vr65\") pod \"downloads-7954f5f757-rc9jz\" (UID: \"b35950ee-9000-4269-a58d-f1d2c4563f05\") " pod="openshift-console/downloads-7954f5f757-rc9jz" Oct 03 13:32:58 crc kubenswrapper[4861]: W1003 13:32:58.009575 4861 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda190f2de_91d0_40a2_b1cc_f9c191d56ce5.slice/crio-fe9ab0a8cb2c0bf83f154aa069b90647d4b5120879a383799ab40093b992ddde WatchSource:0}: Error finding container fe9ab0a8cb2c0bf83f154aa069b90647d4b5120879a383799ab40093b992ddde: Status 404 returned error can't find the container with id fe9ab0a8cb2c0bf83f154aa069b90647d4b5120879a383799ab40093b992ddde Oct 03 13:32:58 crc kubenswrapper[4861]: I1003 13:32:58.011217 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vhpkc\" (UniqueName: \"kubernetes.io/projected/5b53b21d-af9a-4c3a-9b32-30cc1de6cea1-kube-api-access-vhpkc\") pod \"machine-config-operator-74547568cd-k46fg\" (UID: \"5b53b21d-af9a-4c3a-9b32-30cc1de6cea1\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-k46fg" Oct 03 13:32:58 crc kubenswrapper[4861]: I1003 13:32:58.012747 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 13:32:58 crc kubenswrapper[4861]: E1003 13:32:58.013342 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 13:32:58.51332605 +0000 UTC m=+92.511311087 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:32:58 crc kubenswrapper[4861]: I1003 13:32:58.017730 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-6lkw8" Oct 03 13:32:58 crc kubenswrapper[4861]: I1003 13:32:58.027902 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-4ggnn"] Oct 03 13:32:58 crc kubenswrapper[4861]: I1003 13:32:58.040469 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-k46fg" Oct 03 13:32:58 crc kubenswrapper[4861]: I1003 13:32:58.044306 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xm2l4\" (UniqueName: \"kubernetes.io/projected/c4da994b-a677-44ec-a87a-5ae4fdd5e943-kube-api-access-xm2l4\") pod \"olm-operator-6b444d44fb-rjkcl\" (UID: \"c4da994b-a677-44ec-a87a-5ae4fdd5e943\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rjkcl" Oct 03 13:32:58 crc kubenswrapper[4861]: I1003 13:32:58.046697 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4vdwf\" (UniqueName: \"kubernetes.io/projected/5e6df402-f20e-4982-810e-5664640bb0ea-kube-api-access-4vdwf\") pod \"packageserver-d55dfcdfc-nzdn9\" (UID: \"5e6df402-f20e-4982-810e-5664640bb0ea\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-nzdn9" Oct 03 13:32:58 crc kubenswrapper[4861]: I1003 13:32:58.058049 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rjkcl" Oct 03 13:32:58 crc kubenswrapper[4861]: I1003 13:32:58.070447 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s9vds\" (UniqueName: \"kubernetes.io/projected/5f40fe6a-d0b3-41ee-8193-6ad449e3f8df-kube-api-access-s9vds\") pod \"machine-config-server-q7c5v\" (UID: \"5f40fe6a-d0b3-41ee-8193-6ad449e3f8df\") " pod="openshift-machine-config-operator/machine-config-server-q7c5v" Oct 03 13:32:58 crc kubenswrapper[4861]: I1003 13:32:58.077889 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-v6tz9" Oct 03 13:32:58 crc kubenswrapper[4861]: I1003 13:32:58.084167 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-q7c5v" Oct 03 13:32:58 crc kubenswrapper[4861]: I1003 13:32:58.091254 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/39d336a2-fdb0-4500-a751-b771672ae2bd-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-f9q2n\" (UID: \"39d336a2-fdb0-4500-a751-b771672ae2bd\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-f9q2n" Oct 03 13:32:58 crc kubenswrapper[4861]: I1003 13:32:58.114252 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-t6vlm\" (UID: \"2ddecabf-87aa-4eda-93a5-eee5c61d3b91\") " pod="openshift-image-registry/image-registry-697d97f7c8-t6vlm" Oct 03 13:32:58 crc kubenswrapper[4861]: I1003 13:32:58.114575 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-rg4gb" Oct 03 13:32:58 crc kubenswrapper[4861]: E1003 13:32:58.114635 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 13:32:58.61462213 +0000 UTC m=+92.612607177 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-t6vlm" (UID: "2ddecabf-87aa-4eda-93a5-eee5c61d3b91") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:32:58 crc kubenswrapper[4861]: I1003 13:32:58.133804 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zpgc8\" (UniqueName: \"kubernetes.io/projected/6eb664e0-cd97-4267-a6bc-523316711be5-kube-api-access-zpgc8\") pod \"machine-config-controller-84d6567774-jkg6x\" (UID: \"6eb664e0-cd97-4267-a6bc-523316711be5\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-jkg6x" Oct 03 13:32:58 crc kubenswrapper[4861]: I1003 13:32:58.137597 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d7jgz\" (UniqueName: \"kubernetes.io/projected/e0993744-e39f-4dbf-a726-7580528ca825-kube-api-access-d7jgz\") pod \"kube-storage-version-migrator-operator-b67b599dd-584sd\" (UID: \"e0993744-e39f-4dbf-a726-7580528ca825\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-584sd" Oct 03 13:32:58 crc kubenswrapper[4861]: I1003 13:32:58.152610 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2j4cz\" (UniqueName: \"kubernetes.io/projected/217fa116-2a2c-4c70-a13c-370fd7c2ffd7-kube-api-access-2j4cz\") pod \"migrator-59844c95c7-t47dt\" (UID: \"217fa116-2a2c-4c70-a13c-370fd7c2ffd7\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-t47dt" Oct 03 13:32:58 crc kubenswrapper[4861]: I1003 13:32:58.164078 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-4znl7" event={"ID":"f279daa8-7a4e-405d-b7af-499a2179001f","Type":"ContainerStarted","Data":"4598196ab7ad188c801b3b648a4e6a2f1f465cb82cc48089b944f2aa47293804"} Oct 03 13:32:58 crc kubenswrapper[4861]: I1003 13:32:58.172866 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qzw98\" (UniqueName: \"kubernetes.io/projected/5b4e4f2b-6854-4b02-9e1a-79aeff76d109-kube-api-access-qzw98\") pod \"ingress-canary-5nfml\" (UID: \"5b4e4f2b-6854-4b02-9e1a-79aeff76d109\") " pod="openshift-ingress-canary/ingress-canary-5nfml" Oct 03 13:32:58 crc kubenswrapper[4861]: I1003 13:32:58.177634 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-htfhq" event={"ID":"db18a7f3-e40f-4f7c-af49-4920109fc80a","Type":"ContainerStarted","Data":"e9151419fd58fa1f3e08f646fca40e5e60790f8cf0901d6981995220819ecd4c"} Oct 03 13:32:58 crc kubenswrapper[4861]: I1003 13:32:58.177673 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-htfhq" event={"ID":"db18a7f3-e40f-4f7c-af49-4920109fc80a","Type":"ContainerStarted","Data":"c52fc0f858a74ab45d87d7c9bee6057fa4d201b3c828111397d8e65a3aa4a059"} Oct 03 13:32:58 crc kubenswrapper[4861]: I1003 13:32:58.190659 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-l9srr" event={"ID":"cd587cd0-9026-4456-be22-b3ad36ed845f","Type":"ContainerStarted","Data":"22b7808ccb7162ddde928b8309fd3cfbed904cb375771c54b3bb9932b45d457c"} Oct 03 13:32:58 crc kubenswrapper[4861]: I1003 13:32:58.190701 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-l9srr" event={"ID":"cd587cd0-9026-4456-be22-b3ad36ed845f","Type":"ContainerStarted","Data":"fae17e7f2eddd874c1688df1e18b92a369c2b2925975183aa88c9d03195cc788"} Oct 03 13:32:58 crc kubenswrapper[4861]: I1003 13:32:58.191716 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-l9srr" Oct 03 13:32:58 crc kubenswrapper[4861]: I1003 13:32:58.203127 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b9spb\" (UniqueName: \"kubernetes.io/projected/dd1fcc3e-9164-4f36-8082-8458a06f6ce9-kube-api-access-b9spb\") pod \"router-default-5444994796-46jp8\" (UID: \"dd1fcc3e-9164-4f36-8082-8458a06f6ce9\") " pod="openshift-ingress/router-default-5444994796-46jp8" Oct 03 13:32:58 crc kubenswrapper[4861]: I1003 13:32:58.211253 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-rc9jz" Oct 03 13:32:58 crc kubenswrapper[4861]: I1003 13:32:58.213880 4861 patch_prober.go:28] interesting pod/console-operator-58897d9998-l9srr container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.9:8443/readyz\": dial tcp 10.217.0.9:8443: connect: connection refused" start-of-body= Oct 03 13:32:58 crc kubenswrapper[4861]: I1003 13:32:58.213927 4861 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-l9srr" podUID="cd587cd0-9026-4456-be22-b3ad36ed845f" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.9:8443/readyz\": dial tcp 10.217.0.9:8443: connect: connection refused" Oct 03 13:32:58 crc kubenswrapper[4861]: I1003 13:32:58.213992 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-77qvr" event={"ID":"a190f2de-91d0-40a2-b1cc-f9c191d56ce5","Type":"ContainerStarted","Data":"fe9ab0a8cb2c0bf83f154aa069b90647d4b5120879a383799ab40093b992ddde"} Oct 03 13:32:58 crc kubenswrapper[4861]: I1003 13:32:58.215545 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zjgz7\" (UniqueName: \"kubernetes.io/projected/f23bb6a0-acb7-4ada-96a2-73a978d75125-kube-api-access-zjgz7\") pod \"marketplace-operator-79b997595-bj6mh\" (UID: \"f23bb6a0-acb7-4ada-96a2-73a978d75125\") " pod="openshift-marketplace/marketplace-operator-79b997595-bj6mh" Oct 03 13:32:58 crc kubenswrapper[4861]: I1003 13:32:58.215938 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod 
\"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 13:32:58 crc kubenswrapper[4861]: E1003 13:32:58.216643 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 13:32:58.716613298 +0000 UTC m=+92.714598525 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:32:58 crc kubenswrapper[4861]: I1003 13:32:58.218344 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-f9q2n" Oct 03 13:32:58 crc kubenswrapper[4861]: I1003 13:32:58.219536 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-654v6" event={"ID":"a6a84618-0674-4410-ab88-6896066b6077","Type":"ContainerStarted","Data":"a154c518d8f13f980c687014a54da3cc027487c02e5bd17ed36aab394cfff116"} Oct 03 13:32:58 crc kubenswrapper[4861]: I1003 13:32:58.230617 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-584sd" Oct 03 13:32:58 crc kubenswrapper[4861]: I1003 13:32:58.231096 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-8gdwj" event={"ID":"ecce72ba-8490-4169-b5ef-df628ee12cdb","Type":"ContainerStarted","Data":"4783e592708682d0991d64ea7f44aa55c65d27c2d711404e8af15e2824f8b7a1"} Oct 03 13:32:58 crc kubenswrapper[4861]: I1003 13:32:58.231154 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-8gdwj" event={"ID":"ecce72ba-8490-4169-b5ef-df628ee12cdb","Type":"ContainerStarted","Data":"218bb1b49cfa56993534ac7f50c4a5ebff42bf2a761c5b15dc6f2273e745b5f1"} Oct 03 13:32:58 crc kubenswrapper[4861]: I1003 13:32:58.231164 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-8gdwj" event={"ID":"ecce72ba-8490-4169-b5ef-df628ee12cdb","Type":"ContainerStarted","Data":"bdcbddf49518352884620b47be3966cfac7e36027f739d1f1711880f308f7649"} Oct 03 13:32:58 crc kubenswrapper[4861]: I1003 13:32:58.231701 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-t47dt" Oct 03 13:32:58 crc kubenswrapper[4861]: I1003 13:32:58.242493 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-nzdn9" Oct 03 13:32:58 crc kubenswrapper[4861]: I1003 13:32:58.257482 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress/router-default-5444994796-46jp8" Oct 03 13:32:58 crc kubenswrapper[4861]: I1003 13:32:58.258265 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h7wl5\" (UniqueName: \"kubernetes.io/projected/16af6fa6-f789-48cd-8ec1-b84f169fe3a9-kube-api-access-h7wl5\") pod \"dns-default-6gqgm\" (UID: \"16af6fa6-f789-48cd-8ec1-b84f169fe3a9\") " pod="openshift-dns/dns-default-6gqgm" Oct 03 13:32:58 crc kubenswrapper[4861]: I1003 13:32:58.262608 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-slf47\" (UniqueName: \"kubernetes.io/projected/876d6207-8976-4d02-887b-b431a4821eab-kube-api-access-slf47\") pod \"control-plane-machine-set-operator-78cbb6b69f-fcq84\" (UID: \"876d6207-8976-4d02-887b-b431a4821eab\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-fcq84" Oct 03 13:32:58 crc kubenswrapper[4861]: I1003 13:32:58.263766 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-jkg6x" Oct 03 13:32:58 crc kubenswrapper[4861]: I1003 13:32:58.284171 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dmx9k\" (UniqueName: \"kubernetes.io/projected/88db1fac-4c48-400a-9eee-f5c11d8dd12e-kube-api-access-dmx9k\") pod \"csi-hostpathplugin-kgpdn\" (UID: \"88db1fac-4c48-400a-9eee-f5c11d8dd12e\") " pod="hostpath-provisioner/csi-hostpathplugin-kgpdn" Oct 03 13:32:58 crc kubenswrapper[4861]: I1003 13:32:58.284296 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gjvs9" event={"ID":"6f825716-7012-4f64-925d-fdf69a3b8b28","Type":"ContainerStarted","Data":"dfecd76ed3d85900808a394017d33bcf12cbd9a3750dbb13a8e7f7df0fa75dfd"} Oct 03 13:32:58 crc kubenswrapper[4861]: I1003 13:32:58.284349 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gjvs9" event={"ID":"6f825716-7012-4f64-925d-fdf69a3b8b28","Type":"ContainerStarted","Data":"8892703c17b4be57ae35707c84eab3755fc33d1e950b8d5138c6ae8a78e45626"} Oct 03 13:32:58 crc kubenswrapper[4861]: I1003 13:32:58.284726 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-5nfml" Oct 03 13:32:58 crc kubenswrapper[4861]: I1003 13:32:58.287496 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-fcq84" Oct 03 13:32:58 crc kubenswrapper[4861]: I1003 13:32:58.287873 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gjvs9" Oct 03 13:32:58 crc kubenswrapper[4861]: I1003 13:32:58.291401 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m6wpm\" (UniqueName: \"kubernetes.io/projected/5aaec6a4-8da9-4ac8-a792-131254ad8e23-kube-api-access-m6wpm\") pod \"package-server-manager-789f6589d5-lzfdk\" (UID: \"5aaec6a4-8da9-4ac8-a792-131254ad8e23\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-lzfdk" Oct 03 13:32:58 crc kubenswrapper[4861]: I1003 13:32:58.299673 4861 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-gjvs9 container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.12:8443/healthz\": dial tcp 10.217.0.12:8443: connect: connection refused" start-of-body= Oct 03 13:32:58 crc kubenswrapper[4861]: I1003 13:32:58.299727 4861 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gjvs9" podUID="6f825716-7012-4f64-925d-fdf69a3b8b28" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.12:8443/healthz\": dial tcp 10.217.0.12:8443: connect: connection refused" Oct 03 13:32:58 crc kubenswrapper[4861]: I1003 13:32:58.301633 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-zjlzh"] Oct 03 13:32:58 crc kubenswrapper[4861]: I1003 13:32:58.308491 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-bj6mh" Oct 03 13:32:58 crc kubenswrapper[4861]: I1003 13:32:58.319861 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-t6vlm\" (UID: \"2ddecabf-87aa-4eda-93a5-eee5c61d3b91\") " pod="openshift-image-registry/image-registry-697d97f7c8-t6vlm" Oct 03 13:32:58 crc kubenswrapper[4861]: E1003 13:32:58.321635 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 13:32:58.821617843 +0000 UTC m=+92.819602890 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-t6vlm" (UID: "2ddecabf-87aa-4eda-93a5-eee5c61d3b91") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:32:58 crc kubenswrapper[4861]: I1003 13:32:58.326437 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k865v\" (UniqueName: \"kubernetes.io/projected/b546d058-2a13-4ed4-ad3b-061d9870f9ec-kube-api-access-k865v\") pod \"service-ca-operator-777779d784-c7d78\" (UID: \"b546d058-2a13-4ed4-ad3b-061d9870f9ec\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-c7d78" Oct 03 13:32:58 crc kubenswrapper[4861]: I1003 13:32:58.335602 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tc2sr\" (UniqueName: \"kubernetes.io/projected/0c3c7089-d98e-4504-91c2-27851ed21d16-kube-api-access-tc2sr\") pod \"collect-profiles-29324970-q4nf6\" (UID: \"0c3c7089-d98e-4504-91c2-27851ed21d16\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324970-q4nf6" Oct 03 13:32:58 crc kubenswrapper[4861]: I1003 13:32:58.344600 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-lzfdk" Oct 03 13:32:58 crc kubenswrapper[4861]: I1003 13:32:58.364124 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bfdmx\" (UniqueName: \"kubernetes.io/projected/b430fcf9-3c59-4aa8-b554-9e85cf2fcb40-kube-api-access-bfdmx\") pod \"multus-admission-controller-857f4d67dd-s84lp\" (UID: \"b430fcf9-3c59-4aa8-b554-9e85cf2fcb40\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-s84lp" Oct 03 13:32:58 crc kubenswrapper[4861]: I1003 13:32:58.381037 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mkvqc\" (UniqueName: \"kubernetes.io/projected/e98a1486-4c73-4895-923a-dc3b4dcead56-kube-api-access-mkvqc\") pod \"catalog-operator-68c6474976-xt2zh\" (UID: \"e98a1486-4c73-4895-923a-dc3b4dcead56\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-xt2zh" Oct 03 13:32:58 crc kubenswrapper[4861]: I1003 13:32:58.386906 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-c7d78" Oct 03 13:32:58 crc kubenswrapper[4861]: I1003 13:32:58.387608 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-kgpdn" Oct 03 13:32:58 crc kubenswrapper[4861]: I1003 13:32:58.388735 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vgl9f\" (UniqueName: \"kubernetes.io/projected/eb006550-0648-416e-8bb0-48a5baa22a11-kube-api-access-vgl9f\") pod \"etcd-operator-b45778765-zpskq\" (UID: \"eb006550-0648-416e-8bb0-48a5baa22a11\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zpskq" Oct 03 13:32:58 crc kubenswrapper[4861]: I1003 13:32:58.402807 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/dns-default-6gqgm" Oct 03 13:32:58 crc kubenswrapper[4861]: I1003 13:32:58.421859 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 13:32:58 crc kubenswrapper[4861]: I1003 13:32:58.423185 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-8tkv7"] Oct 03 13:32:58 crc kubenswrapper[4861]: E1003 13:32:58.423316 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 13:32:58.923298213 +0000 UTC m=+92.921283260 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:32:58 crc kubenswrapper[4861]: I1003 13:32:58.523148 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-t6vlm\" (UID: \"2ddecabf-87aa-4eda-93a5-eee5c61d3b91\") " pod="openshift-image-registry/image-registry-697d97f7c8-t6vlm" Oct 03 13:32:58 crc kubenswrapper[4861]: E1003 13:32:58.523394 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 13:32:59.023384881 +0000 UTC m=+93.021369928 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-t6vlm" (UID: "2ddecabf-87aa-4eda-93a5-eee5c61d3b91") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:32:58 crc kubenswrapper[4861]: I1003 13:32:58.554491 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-s84lp" Oct 03 13:32:58 crc kubenswrapper[4861]: I1003 13:32:58.564747 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-8kth8"] Oct 03 13:32:58 crc kubenswrapper[4861]: I1003 13:32:58.576577 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-xt2zh" Oct 03 13:32:58 crc kubenswrapper[4861]: I1003 13:32:58.586642 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-rkmmm"] Oct 03 13:32:58 crc kubenswrapper[4861]: I1003 13:32:58.599211 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29324970-q4nf6" Oct 03 13:32:58 crc kubenswrapper[4861]: I1003 13:32:58.623804 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 13:32:58 crc kubenswrapper[4861]: E1003 13:32:58.624062 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 13:32:59.124011814 +0000 UTC m=+93.121996861 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:32:58 crc kubenswrapper[4861]: I1003 13:32:58.624512 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-t6vlm\" (UID: \"2ddecabf-87aa-4eda-93a5-eee5c61d3b91\") " pod="openshift-image-registry/image-registry-697d97f7c8-t6vlm" Oct 03 13:32:58 crc kubenswrapper[4861]: E1003 13:32:58.624870 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 13:32:59.124856196 +0000 UTC m=+93.122841243 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-t6vlm" (UID: "2ddecabf-87aa-4eda-93a5-eee5c61d3b91") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:32:58 crc kubenswrapper[4861]: I1003 13:32:58.630548 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-zpskq" Oct 03 13:32:58 crc kubenswrapper[4861]: I1003 13:32:58.731558 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-r4wck"] Oct 03 13:32:58 crc kubenswrapper[4861]: I1003 13:32:58.741790 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 13:32:58 crc kubenswrapper[4861]: E1003 13:32:58.742534 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 13:32:59.242507634 +0000 UTC m=+93.240492671 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:32:58 crc kubenswrapper[4861]: I1003 13:32:58.796328 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-5p9tt"] Oct 03 13:32:58 crc kubenswrapper[4861]: I1003 13:32:58.811398 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-xv8s4"] Oct 03 13:32:58 crc kubenswrapper[4861]: I1003 13:32:58.844484 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-t6vlm\" (UID: \"2ddecabf-87aa-4eda-93a5-eee5c61d3b91\") " pod="openshift-image-registry/image-registry-697d97f7c8-t6vlm" Oct 03 13:32:58 crc kubenswrapper[4861]: E1003 13:32:58.844884 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 13:32:59.344870371 +0000 UTC m=+93.342855408 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-t6vlm" (UID: "2ddecabf-87aa-4eda-93a5-eee5c61d3b91") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:32:58 crc kubenswrapper[4861]: I1003 13:32:58.949067 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 13:32:58 crc kubenswrapper[4861]: E1003 13:32:58.949209 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 13:32:59.449183418 +0000 UTC m=+93.447168465 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:32:58 crc kubenswrapper[4861]: I1003 13:32:58.949514 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-t6vlm\" (UID: \"2ddecabf-87aa-4eda-93a5-eee5c61d3b91\") " pod="openshift-image-registry/image-registry-697d97f7c8-t6vlm" Oct 03 13:32:58 crc kubenswrapper[4861]: E1003 13:32:58.949790 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 13:32:59.449771954 +0000 UTC m=+93.447757001 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-t6vlm" (UID: "2ddecabf-87aa-4eda-93a5-eee5c61d3b91") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:32:59 crc kubenswrapper[4861]: I1003 13:32:59.021674 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-fv562"] Oct 03 13:32:59 crc kubenswrapper[4861]: I1003 13:32:59.050124 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 13:32:59 crc kubenswrapper[4861]: E1003 13:32:59.050490 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 13:32:59.550475829 +0000 UTC m=+93.548460876 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:32:59 crc kubenswrapper[4861]: I1003 13:32:59.051956 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-6lkw8"] Oct 03 13:32:59 crc kubenswrapper[4861]: I1003 13:32:59.052218 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-bbq9v"] Oct 03 13:32:59 crc kubenswrapper[4861]: I1003 13:32:59.062321 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rjkcl"] Oct 03 13:32:59 crc kubenswrapper[4861]: I1003 13:32:59.154540 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-t6vlm\" (UID: \"2ddecabf-87aa-4eda-93a5-eee5c61d3b91\") " pod="openshift-image-registry/image-registry-697d97f7c8-t6vlm" Oct 03 13:32:59 crc kubenswrapper[4861]: E1003 13:32:59.155142 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 13:32:59.655122524 +0000 UTC m=+93.653107571 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-t6vlm" (UID: "2ddecabf-87aa-4eda-93a5-eee5c61d3b91") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:32:59 crc kubenswrapper[4861]: I1003 13:32:59.246133 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-rg4gb"] Oct 03 13:32:59 crc kubenswrapper[4861]: I1003 13:32:59.256201 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 13:32:59 crc kubenswrapper[4861]: E1003 13:32:59.256643 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 13:32:59.756623929 +0000 UTC m=+93.754608976 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:32:59 crc kubenswrapper[4861]: W1003 13:32:59.262755 4861 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9844df32_3946_46fe_90cb_0cc7fedd4af5.slice/crio-046e30bbc7dfc181e43d22ab0cb7e6c5d19d62c494f9dc384a16b293d3f46863 WatchSource:0}: Error finding container 046e30bbc7dfc181e43d22ab0cb7e6c5d19d62c494f9dc384a16b293d3f46863: Status 404 returned error can't find the container with id 046e30bbc7dfc181e43d22ab0cb7e6c5d19d62c494f9dc384a16b293d3f46863 Oct 03 13:32:59 crc kubenswrapper[4861]: I1003 13:32:59.304642 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-htfhq" podStartSLOduration=71.304625576 podStartE2EDuration="1m11.304625576s" podCreationTimestamp="2025-10-03 13:31:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:32:59.30164572 +0000 UTC m=+93.299630757" watchObservedRunningTime="2025-10-03 13:32:59.304625576 +0000 UTC m=+93.302610643" Oct 03 13:32:59 crc kubenswrapper[4861]: I1003 13:32:59.346509 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-v6tz9"] Oct 03 13:32:59 crc kubenswrapper[4861]: I1003 13:32:59.359638 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-xxf9s"] Oct 03 13:32:59 crc kubenswrapper[4861]: I1003 13:32:59.360778 4861 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-t6vlm\" (UID: \"2ddecabf-87aa-4eda-93a5-eee5c61d3b91\") " pod="openshift-image-registry/image-registry-697d97f7c8-t6vlm" Oct 03 13:32:59 crc kubenswrapper[4861]: E1003 13:32:59.361168 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 13:32:59.861154612 +0000 UTC m=+93.859139659 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-t6vlm" (UID: "2ddecabf-87aa-4eda-93a5-eee5c61d3b91") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:32:59 crc kubenswrapper[4861]: I1003 13:32:59.423840 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-6lkw8" event={"ID":"9844df32-3946-46fe-90cb-0cc7fedd4af5","Type":"ContainerStarted","Data":"046e30bbc7dfc181e43d22ab0cb7e6c5d19d62c494f9dc384a16b293d3f46863"} Oct 03 13:32:59 crc kubenswrapper[4861]: I1003 13:32:59.438619 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-f9q2n"] Oct 03 13:32:59 crc kubenswrapper[4861]: I1003 13:32:59.438991 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-l9srr" podStartSLOduration=71.438977011 podStartE2EDuration="1m11.438977011s" podCreationTimestamp="2025-10-03 13:31:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:32:59.436822607 +0000 UTC m=+93.434807664" watchObservedRunningTime="2025-10-03 13:32:59.438977011 +0000 UTC m=+93.436962058" Oct 03 13:32:59 crc kubenswrapper[4861]: I1003 13:32:59.441136 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-xv8s4" event={"ID":"fdb216bf-a9cf-4f81-95ee-0424fc4bdc89","Type":"ContainerStarted","Data":"b687c2bb1fe6c4730f8e3eb0f6bbd2458756de3f9e608fa4b47b572d5a15336c"} Oct 03 13:32:59 crc kubenswrapper[4861]: I1003 13:32:59.462855 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 13:32:59 crc kubenswrapper[4861]: E1003 13:32:59.463245 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 13:32:59.963207682 +0000 UTC m=+93.961192729 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:32:59 crc kubenswrapper[4861]: I1003 13:32:59.471454 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-zjlzh" event={"ID":"51bf4935-7bd2-4059-9869-60fc8db46d82","Type":"ContainerStarted","Data":"2c83abda8565fb94606217eeaa57554431840cd58f4d4b5b381776312cfc1710"} Oct 03 13:32:59 crc kubenswrapper[4861]: I1003 13:32:59.507003 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-4ggnn" event={"ID":"0118936f-47b4-4d58-956c-572d58803b3f","Type":"ContainerStarted","Data":"f81f386a8c58fd46ac1f8cffd46bd730ab1a5dd10c447ecd788d2dac17b7bb2b"} Oct 03 13:32:59 crc kubenswrapper[4861]: I1003 13:32:59.507055 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-4ggnn" event={"ID":"0118936f-47b4-4d58-956c-572d58803b3f","Type":"ContainerStarted","Data":"d77415904b8ff6e3854088cd50c134e9819e51ff48c42937f70fd8258560dee2"} Oct 03 13:32:59 crc kubenswrapper[4861]: I1003 13:32:59.531388 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-jkg6x"] Oct 03 13:32:59 crc kubenswrapper[4861]: I1003 13:32:59.536664 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rjkcl" event={"ID":"c4da994b-a677-44ec-a87a-5ae4fdd5e943","Type":"ContainerStarted","Data":"ef8f2da7dfa9ccf522f185f8a0b3d45e213b56332f0d5c27585ec4907d561113"} Oct 03 13:32:59 crc kubenswrapper[4861]: I1003 13:32:59.540925 4861 generic.go:334] "Generic (PLEG): container finished" podID="f279daa8-7a4e-405d-b7af-499a2179001f" containerID="d368eed8bc00833ea2f7ccfe73e5169788fbbcce1e9fd11af146e8434db319d8" exitCode=0 Oct 03 13:32:59 crc kubenswrapper[4861]: I1003 13:32:59.542946 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-4znl7" event={"ID":"f279daa8-7a4e-405d-b7af-499a2179001f","Type":"ContainerDied","Data":"d368eed8bc00833ea2f7ccfe73e5169788fbbcce1e9fd11af146e8434db319d8"} Oct 03 13:32:59 crc kubenswrapper[4861]: I1003 13:32:59.551993 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-8tkv7" event={"ID":"3d27eb32-1a56-4d8c-9504-4b7fa8261df6","Type":"ContainerStarted","Data":"8212813e363eb54ddc67801337bb38eed0f570c3bd0de3f28e3913c321cb530b"} Oct 03 13:32:59 crc kubenswrapper[4861]: I1003 13:32:59.565785 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-t6vlm\" (UID: \"2ddecabf-87aa-4eda-93a5-eee5c61d3b91\") " pod="openshift-image-registry/image-registry-697d97f7c8-t6vlm" Oct 03 13:32:59 crc kubenswrapper[4861]: E1003 13:32:59.568661 4861 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 13:33:00.068646517 +0000 UTC m=+94.066631564 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-t6vlm" (UID: "2ddecabf-87aa-4eda-93a5-eee5c61d3b91") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:32:59 crc kubenswrapper[4861]: I1003 13:32:59.576165 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-77qvr" event={"ID":"a190f2de-91d0-40a2-b1cc-f9c191d56ce5","Type":"ContainerStarted","Data":"1726f3a686d781be12289a186c6ee048ea84746412ca180ab789c6391711e672"} Oct 03 13:32:59 crc kubenswrapper[4861]: I1003 13:32:59.582672 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-654v6" event={"ID":"a6a84618-0674-4410-ab88-6896066b6077","Type":"ContainerStarted","Data":"ac191b98f4c14b5034fef72421bc5fbc7b35546e97318f247c42146d9b1a773f"} Oct 03 13:32:59 crc kubenswrapper[4861]: I1003 13:32:59.582735 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-654v6" Oct 03 13:32:59 crc kubenswrapper[4861]: I1003 13:32:59.586542 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-q7c5v" event={"ID":"5f40fe6a-d0b3-41ee-8193-6ad449e3f8df","Type":"ContainerStarted","Data":"1e96135d9c4da8d6aff774b24521cb842a4cd3672c6014eedbc2a71a0de3a9d4"} Oct 03 13:32:59 crc kubenswrapper[4861]: I1003 13:32:59.587507 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-r4wck" event={"ID":"b06fff6c-bfa9-4b23-81d4-7cfc8ec52173","Type":"ContainerStarted","Data":"fe8df8742b834e6698c2ba5323ee8c75a3dccbbfe75731584ddc14506e92c51e"} Oct 03 13:32:59 crc kubenswrapper[4861]: I1003 13:32:59.594790 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-rkmmm" event={"ID":"802caeaa-43a4-4cca-b946-5b561df185a8","Type":"ContainerStarted","Data":"bf3670b0035db9ac96cbb9f0e20f4a0895dc8c5aea6feb7c8cf33768941137df"} Oct 03 13:32:59 crc kubenswrapper[4861]: I1003 13:32:59.598535 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-fv562" event={"ID":"c805746f-1e3e-488f-abda-4584a3028187","Type":"ContainerStarted","Data":"8d71ecf048015f038d26220e248625de9219c5eca52225dce31507df3414f271"} Oct 03 13:32:59 crc kubenswrapper[4861]: I1003 13:32:59.603401 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-5p9tt" event={"ID":"a7c2774d-2d73-439d-94d7-6c184b05bf91","Type":"ContainerStarted","Data":"96c9c95ec10b1d1e23eaea88566d8c257e4a996d7252e3d54a793873bb7d21c2"} Oct 03 13:32:59 crc kubenswrapper[4861]: I1003 13:32:59.628331 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-46jp8" 
event={"ID":"dd1fcc3e-9164-4f36-8082-8458a06f6ce9","Type":"ContainerStarted","Data":"3505acb4279d7cb2a0df08afde7dea163bf42d30c0b418fd2508ee979b29f378"} Oct 03 13:32:59 crc kubenswrapper[4861]: I1003 13:32:59.639143 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8kth8" event={"ID":"41c5f641-6c5c-4b3b-89d7-2a265cab36ca","Type":"ContainerStarted","Data":"c75906c3daa3564762a08eab9798723a66a18cbe61ed3748e16a78e6d478037b"} Oct 03 13:32:59 crc kubenswrapper[4861]: I1003 13:32:59.671518 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 13:32:59 crc kubenswrapper[4861]: E1003 13:32:59.676302 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 13:33:00.171856406 +0000 UTC m=+94.169841483 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:32:59 crc kubenswrapper[4861]: I1003 13:32:59.676504 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-t6vlm\" (UID: \"2ddecabf-87aa-4eda-93a5-eee5c61d3b91\") " pod="openshift-image-registry/image-registry-697d97f7c8-t6vlm" Oct 03 13:32:59 crc kubenswrapper[4861]: E1003 13:32:59.677997 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 13:33:00.177981823 +0000 UTC m=+94.175966870 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-t6vlm" (UID: "2ddecabf-87aa-4eda-93a5-eee5c61d3b91") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:32:59 crc kubenswrapper[4861]: I1003 13:32:59.716248 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-fcq84"] Oct 03 13:32:59 crc kubenswrapper[4861]: I1003 13:32:59.777838 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 13:32:59 crc kubenswrapper[4861]: E1003 13:32:59.778437 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 13:33:00.27840622 +0000 UTC m=+94.276391387 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:32:59 crc kubenswrapper[4861]: I1003 13:32:59.779011 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-t6vlm\" (UID: \"2ddecabf-87aa-4eda-93a5-eee5c61d3b91\") " pod="openshift-image-registry/image-registry-697d97f7c8-t6vlm" Oct 03 13:32:59 crc kubenswrapper[4861]: I1003 13:32:59.790361 4861 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-654v6 container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.7:8443/healthz\": dial tcp 10.217.0.7:8443: connect: connection refused" start-of-body= Oct 03 13:32:59 crc kubenswrapper[4861]: I1003 13:32:59.791264 4861 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-654v6" podUID="a6a84618-0674-4410-ab88-6896066b6077" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.7:8443/healthz\": dial tcp 10.217.0.7:8443: connect: connection refused" Oct 03 13:32:59 crc kubenswrapper[4861]: E1003 13:32:59.800842 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 13:33:00.300390822 +0000 UTC m=+94.298375859 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-t6vlm" (UID: "2ddecabf-87aa-4eda-93a5-eee5c61d3b91") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:32:59 crc kubenswrapper[4861]: I1003 13:32:59.805106 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gjvs9" Oct 03 13:32:59 crc kubenswrapper[4861]: I1003 13:32:59.821694 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=2.821656196 podStartE2EDuration="2.821656196s" podCreationTimestamp="2025-10-03 13:32:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:32:59.821128132 +0000 UTC m=+93.819113169" watchObservedRunningTime="2025-10-03 13:32:59.821656196 +0000 UTC m=+93.819641243" Oct 03 13:32:59 crc kubenswrapper[4861]: I1003 13:32:59.882852 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 13:32:59 crc kubenswrapper[4861]: E1003 13:32:59.883288 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 13:33:00.383269382 +0000 UTC m=+94.381254429 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:32:59 crc kubenswrapper[4861]: W1003 13:32:59.911511 4861 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6eb664e0_cd97_4267_a6bc_523316711be5.slice/crio-dfbdd7aef8a15e87be6864032745b9cc38dd9433c2c9ea28d378914b4172afbe WatchSource:0}: Error finding container dfbdd7aef8a15e87be6864032745b9cc38dd9433c2c9ea28d378914b4172afbe: Status 404 returned error can't find the container with id dfbdd7aef8a15e87be6864032745b9cc38dd9433c2c9ea28d378914b4172afbe Oct 03 13:32:59 crc kubenswrapper[4861]: I1003 13:32:59.927549 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gjvs9" podStartSLOduration=71.927512652 podStartE2EDuration="1m11.927512652s" podCreationTimestamp="2025-10-03 13:31:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:32:59.866334299 +0000 UTC m=+93.864319346" watchObservedRunningTime="2025-10-03 13:32:59.927512652 +0000 UTC m=+93.925497699" Oct 03 13:32:59 crc kubenswrapper[4861]: I1003 13:32:59.962584 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-8gdwj" podStartSLOduration=71.962545268 podStartE2EDuration="1m11.962545268s" podCreationTimestamp="2025-10-03 13:31:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:32:59.909957174 +0000 UTC m=+93.907942221" watchObservedRunningTime="2025-10-03 13:32:59.962545268 +0000 UTC m=+93.960530315" Oct 03 13:32:59 crc kubenswrapper[4861]: I1003 13:32:59.987836 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-t6vlm\" (UID: \"2ddecabf-87aa-4eda-93a5-eee5c61d3b91\") " pod="openshift-image-registry/image-registry-697d97f7c8-t6vlm" Oct 03 13:32:59 crc kubenswrapper[4861]: E1003 13:32:59.988125 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 13:33:00.488113283 +0000 UTC m=+94.486098330 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-t6vlm" (UID: "2ddecabf-87aa-4eda-93a5-eee5c61d3b91") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:33:00 crc kubenswrapper[4861]: I1003 13:33:00.088368 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 13:33:00 crc kubenswrapper[4861]: E1003 13:33:00.088755 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 13:33:00.588740004 +0000 UTC m=+94.586725051 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:33:00 crc kubenswrapper[4861]: I1003 13:33:00.190480 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-t6vlm\" (UID: \"2ddecabf-87aa-4eda-93a5-eee5c61d3b91\") " pod="openshift-image-registry/image-registry-697d97f7c8-t6vlm" Oct 03 13:33:00 crc kubenswrapper[4861]: E1003 13:33:00.190991 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 13:33:00.690970978 +0000 UTC m=+94.688956025 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-t6vlm" (UID: "2ddecabf-87aa-4eda-93a5-eee5c61d3b91") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:33:00 crc kubenswrapper[4861]: I1003 13:33:00.208472 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-654v6" podStartSLOduration=72.208437385 podStartE2EDuration="1m12.208437385s" podCreationTimestamp="2025-10-03 13:31:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:33:00.19959532 +0000 UTC m=+94.197580367" watchObservedRunningTime="2025-10-03 13:33:00.208437385 +0000 UTC m=+94.206422432" Oct 03 13:33:00 crc kubenswrapper[4861]: I1003 13:33:00.217125 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-l9srr" Oct 03 13:33:00 crc kubenswrapper[4861]: I1003 13:33:00.291670 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 13:33:00 crc kubenswrapper[4861]: E1003 13:33:00.292045 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 13:33:00.792028083 +0000 UTC m=+94.790013130 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:33:00 crc kubenswrapper[4861]: I1003 13:33:00.307177 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-4ggnn" podStartSLOduration=72.306944494 podStartE2EDuration="1m12.306944494s" podCreationTimestamp="2025-10-03 13:31:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:33:00.305572509 +0000 UTC m=+94.303557556" watchObservedRunningTime="2025-10-03 13:33:00.306944494 +0000 UTC m=+94.304929541" Oct 03 13:33:00 crc kubenswrapper[4861]: I1003 13:33:00.311785 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-77qvr" podStartSLOduration=72.311766017 podStartE2EDuration="1m12.311766017s" podCreationTimestamp="2025-10-03 13:31:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:33:00.267062334 +0000 UTC m=+94.265047381" watchObservedRunningTime="2025-10-03 13:33:00.311766017 +0000 UTC m=+94.309751064" Oct 03 13:33:00 crc kubenswrapper[4861]: I1003 13:33:00.323080 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-rc9jz"] Oct 03 13:33:00 crc kubenswrapper[4861]: I1003 13:33:00.392975 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-t6vlm\" (UID: \"2ddecabf-87aa-4eda-93a5-eee5c61d3b91\") " pod="openshift-image-registry/image-registry-697d97f7c8-t6vlm" Oct 03 13:33:00 crc kubenswrapper[4861]: E1003 13:33:00.393659 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 13:33:00.89364041 +0000 UTC m=+94.891625517 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-t6vlm" (UID: "2ddecabf-87aa-4eda-93a5-eee5c61d3b91") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:33:00 crc kubenswrapper[4861]: I1003 13:33:00.493708 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 13:33:00 crc kubenswrapper[4861]: E1003 13:33:00.493925 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 13:33:00.993897544 +0000 UTC m=+94.991882591 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:33:00 crc kubenswrapper[4861]: I1003 13:33:00.496458 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-t6vlm\" (UID: \"2ddecabf-87aa-4eda-93a5-eee5c61d3b91\") " pod="openshift-image-registry/image-registry-697d97f7c8-t6vlm" Oct 03 13:33:00 crc kubenswrapper[4861]: E1003 13:33:00.496966 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 13:33:00.996931761 +0000 UTC m=+94.994916808 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-t6vlm" (UID: "2ddecabf-87aa-4eda-93a5-eee5c61d3b91") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:33:00 crc kubenswrapper[4861]: I1003 13:33:00.560589 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-t47dt"] Oct 03 13:33:00 crc kubenswrapper[4861]: I1003 13:33:00.563578 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-5nfml"] Oct 03 13:33:00 crc kubenswrapper[4861]: I1003 13:33:00.597257 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 13:33:00 crc kubenswrapper[4861]: E1003 13:33:00.597555 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 13:33:01.097541114 +0000 UTC m=+95.095526161 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:33:00 crc kubenswrapper[4861]: I1003 13:33:00.617996 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-nzdn9"] Oct 03 13:33:00 crc kubenswrapper[4861]: I1003 13:33:00.627966 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-6gqgm"] Oct 03 13:33:00 crc kubenswrapper[4861]: I1003 13:33:00.677907 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-xt2zh"] Oct 03 13:33:00 crc kubenswrapper[4861]: I1003 13:33:00.698917 4861 generic.go:334] "Generic (PLEG): container finished" podID="802caeaa-43a4-4cca-b946-5b561df185a8" containerID="13d022aa6450c8e4397a7f9db3f7fecfeffa11a1fb3febdcb878d4e99632d12b" exitCode=0 Oct 03 13:33:00 crc kubenswrapper[4861]: I1003 13:33:00.706810 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-t6vlm\" (UID: \"2ddecabf-87aa-4eda-93a5-eee5c61d3b91\") " pod="openshift-image-registry/image-registry-697d97f7c8-t6vlm" Oct 03 13:33:00 crc kubenswrapper[4861]: E1003 13:33:00.708550 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" 
failed. No retries permitted until 2025-10-03 13:33:01.208502921 +0000 UTC m=+95.206487968 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-t6vlm" (UID: "2ddecabf-87aa-4eda-93a5-eee5c61d3b91") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:33:00 crc kubenswrapper[4861]: I1003 13:33:00.739278 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-v6tz9" event={"ID":"35c87388-97eb-44ff-91d6-6e9b9cfaa6a1","Type":"ContainerStarted","Data":"80706c4f0bc3f6875cd066aca77b1148571763260f4cfd1558f45b14ac3af6b2"} Oct 03 13:33:00 crc kubenswrapper[4861]: I1003 13:33:00.739322 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-v6tz9" event={"ID":"35c87388-97eb-44ff-91d6-6e9b9cfaa6a1","Type":"ContainerStarted","Data":"a3d0110a3237c4ce9031546a1c0bf63697f314adbf9d8190a434dde58f7d2eca"} Oct 03 13:33:00 crc kubenswrapper[4861]: I1003 13:33:00.739340 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-584sd"] Oct 03 13:33:00 crc kubenswrapper[4861]: I1003 13:33:00.739359 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-rkmmm" event={"ID":"802caeaa-43a4-4cca-b946-5b561df185a8","Type":"ContainerDied","Data":"13d022aa6450c8e4397a7f9db3f7fecfeffa11a1fb3febdcb878d4e99632d12b"} Oct 03 13:33:00 crc kubenswrapper[4861]: I1003 13:33:00.739992 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-fcq84" event={"ID":"876d6207-8976-4d02-887b-b431a4821eab","Type":"ContainerStarted","Data":"74c6e7d6ff4dc98cb119030291038905647fa5432ee6c43e7a69958b38460fe3"} Oct 03 13:33:00 crc kubenswrapper[4861]: I1003 13:33:00.743032 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-rg4gb" event={"ID":"04154cd6-a67c-42d3-bbb0-951c4986390d","Type":"ContainerStarted","Data":"9b96404ab21c799a9ec1990d431f1feb32126a688b37e6f2bf2b2966ec39be68"} Oct 03 13:33:00 crc kubenswrapper[4861]: I1003 13:33:00.750578 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-v6tz9" podStartSLOduration=72.744969203 podStartE2EDuration="1m12.744969203s" podCreationTimestamp="2025-10-03 13:31:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:33:00.730710119 +0000 UTC m=+94.728695176" watchObservedRunningTime="2025-10-03 13:33:00.744969203 +0000 UTC m=+94.742954250" Oct 03 13:33:00 crc kubenswrapper[4861]: I1003 13:33:00.761127 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-kgpdn"] Oct 03 13:33:00 crc kubenswrapper[4861]: I1003 13:33:00.768042 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-bj6mh"] Oct 03 13:33:00 crc kubenswrapper[4861]: I1003 13:33:00.772432 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-operator-lifecycle-manager/collect-profiles-29324970-q4nf6"] Oct 03 13:33:00 crc kubenswrapper[4861]: I1003 13:33:00.776369 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-rc9jz" event={"ID":"b35950ee-9000-4269-a58d-f1d2c4563f05","Type":"ContainerStarted","Data":"b37e01b47704495fcfd60d57264579f8fece65ce20cb710bf0d022d52a2edc91"} Oct 03 13:33:00 crc kubenswrapper[4861]: I1003 13:33:00.780543 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-jkg6x" event={"ID":"6eb664e0-cd97-4267-a6bc-523316711be5","Type":"ContainerStarted","Data":"dfbdd7aef8a15e87be6864032745b9cc38dd9433c2c9ea28d378914b4172afbe"} Oct 03 13:33:00 crc kubenswrapper[4861]: I1003 13:33:00.787219 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-lzfdk"] Oct 03 13:33:00 crc kubenswrapper[4861]: I1003 13:33:00.803648 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-k46fg"] Oct 03 13:33:00 crc kubenswrapper[4861]: I1003 13:33:00.808670 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 13:33:00 crc kubenswrapper[4861]: E1003 13:33:00.809370 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 13:33:01.309341869 +0000 UTC m=+95.307326916 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:33:00 crc kubenswrapper[4861]: I1003 13:33:00.809932 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-t6vlm\" (UID: \"2ddecabf-87aa-4eda-93a5-eee5c61d3b91\") " pod="openshift-image-registry/image-registry-697d97f7c8-t6vlm" Oct 03 13:33:00 crc kubenswrapper[4861]: E1003 13:33:00.812218 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 13:33:01.312201122 +0000 UTC m=+95.310186259 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-t6vlm" (UID: "2ddecabf-87aa-4eda-93a5-eee5c61d3b91") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:33:00 crc kubenswrapper[4861]: I1003 13:33:00.822512 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-s84lp"] Oct 03 13:33:00 crc kubenswrapper[4861]: I1003 13:33:00.836130 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-zpskq"] Oct 03 13:33:00 crc kubenswrapper[4861]: I1003 13:33:00.841588 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-r4wck" event={"ID":"b06fff6c-bfa9-4b23-81d4-7cfc8ec52173","Type":"ContainerStarted","Data":"e088721268a120b0a96b28ced9cc39efb6c4d9d5014c9d00a3a4a215e3dbe530"} Oct 03 13:33:00 crc kubenswrapper[4861]: I1003 13:33:00.849658 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-c7d78"] Oct 03 13:33:00 crc kubenswrapper[4861]: I1003 13:33:00.866622 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-bbq9v" event={"ID":"3bf6ab88-b524-4a66-9fff-f873646d11d9","Type":"ContainerStarted","Data":"9fe4c7afeba3d20fe0912157ce3ee06981d0ad9b991e48bcd5cd4060d4802a15"} Oct 03 13:33:00 crc kubenswrapper[4861]: W1003 13:33:00.866714 4861 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podeb006550_0648_416e_8bb0_48a5baa22a11.slice/crio-baa3839f5e25590762682af3862e0091e73cb4128efbb7490e9493f2c5a8ea47 WatchSource:0}: Error finding container baa3839f5e25590762682af3862e0091e73cb4128efbb7490e9493f2c5a8ea47: Status 404 returned error can't find the container with id baa3839f5e25590762682af3862e0091e73cb4128efbb7490e9493f2c5a8ea47 Oct 03 13:33:00 crc kubenswrapper[4861]: I1003 13:33:00.875296 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-r4wck" podStartSLOduration=72.875275954 podStartE2EDuration="1m12.875275954s" podCreationTimestamp="2025-10-03 13:31:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:33:00.86416456 +0000 UTC m=+94.862149607" watchObservedRunningTime="2025-10-03 13:33:00.875275954 +0000 UTC m=+94.873261001" Oct 03 13:33:00 crc kubenswrapper[4861]: W1003 13:33:00.901606 4861 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb546d058_2a13_4ed4_ad3b_061d9870f9ec.slice/crio-ac6dbf57fef7b3929e3dbed4c8438ea3a636e672d483c9dcb7b47736c586b5f9 WatchSource:0}: Error finding container ac6dbf57fef7b3929e3dbed4c8438ea3a636e672d483c9dcb7b47736c586b5f9: Status 404 returned error can't find the container with id ac6dbf57fef7b3929e3dbed4c8438ea3a636e672d483c9dcb7b47736c586b5f9 Oct 03 13:33:00 crc kubenswrapper[4861]: I1003 13:33:00.907891 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-xxf9s" event={"ID":"5aad7266-a7c4-45fb-bbed-e1a1277681ef","Type":"ContainerStarted","Data":"c5bd71b42581a3ce7b0328393225f034ebbd34a5518545a30f69d30251e6a252"} Oct 03 13:33:00 crc kubenswrapper[4861]: I1003 13:33:00.914985 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 13:33:00 crc kubenswrapper[4861]: E1003 13:33:00.915179 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 13:33:01.415124164 +0000 UTC m=+95.413109211 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:33:00 crc kubenswrapper[4861]: I1003 13:33:00.915671 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-t6vlm\" (UID: \"2ddecabf-87aa-4eda-93a5-eee5c61d3b91\") " pod="openshift-image-registry/image-registry-697d97f7c8-t6vlm" Oct 03 13:33:00 crc kubenswrapper[4861]: E1003 13:33:00.916194 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 13:33:01.41617691 +0000 UTC m=+95.414161957 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-t6vlm" (UID: "2ddecabf-87aa-4eda-93a5-eee5c61d3b91") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:33:00 crc kubenswrapper[4861]: W1003 13:33:00.924963 4861 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5b53b21d_af9a_4c3a_9b32_30cc1de6cea1.slice/crio-8ff5e6d347ea27a8855e267ec2c1a38b8c34ebec0380fa5a0b679b8b6d3208f6 WatchSource:0}: Error finding container 8ff5e6d347ea27a8855e267ec2c1a38b8c34ebec0380fa5a0b679b8b6d3208f6: Status 404 returned error can't find the container with id 8ff5e6d347ea27a8855e267ec2c1a38b8c34ebec0380fa5a0b679b8b6d3208f6 Oct 03 13:33:00 crc kubenswrapper[4861]: I1003 13:33:00.929126 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-f9q2n" event={"ID":"39d336a2-fdb0-4500-a751-b771672ae2bd","Type":"ContainerStarted","Data":"f545001e0aaec6eb1d61425395af6fc626783b7e121f28282201124619039b4e"} Oct 03 13:33:00 crc kubenswrapper[4861]: W1003 13:33:00.929494 4861 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb430fcf9_3c59_4aa8_b554_9e85cf2fcb40.slice/crio-b423df4f309c6ec45873a9c19b871952b2967a41b767aa4a0abadff2a9251db5 WatchSource:0}: Error finding container b423df4f309c6ec45873a9c19b871952b2967a41b767aa4a0abadff2a9251db5: Status 404 returned error can't find the container with id b423df4f309c6ec45873a9c19b871952b2967a41b767aa4a0abadff2a9251db5 Oct 03 13:33:00 crc kubenswrapper[4861]: I1003 13:33:00.949421 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-q7c5v" event={"ID":"5f40fe6a-d0b3-41ee-8193-6ad449e3f8df","Type":"ContainerStarted","Data":"797edc4d1f4c0fecd7d6f369a5cebf87e5c42fa55026927bbe223daf1dc51a0b"} Oct 03 13:33:00 crc kubenswrapper[4861]: I1003 13:33:00.974793 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-q7c5v" podStartSLOduration=5.974773998 podStartE2EDuration="5.974773998s" podCreationTimestamp="2025-10-03 13:32:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:33:00.973970748 +0000 UTC m=+94.971955795" watchObservedRunningTime="2025-10-03 13:33:00.974773998 +0000 UTC m=+94.972759045" Oct 03 13:33:00 crc kubenswrapper[4861]: I1003 13:33:00.978292 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-6lkw8" event={"ID":"9844df32-3946-46fe-90cb-0cc7fedd4af5","Type":"ContainerStarted","Data":"dabf8b271ca87ee459930916c84f9ccb2ab86581b09c4ea3a9ae6b4ec3a2684f"} Oct 03 13:33:00 crc kubenswrapper[4861]: I1003 13:33:00.980771 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-8tkv7" event={"ID":"3d27eb32-1a56-4d8c-9504-4b7fa8261df6","Type":"ContainerStarted","Data":"c6b9903be1783c82829c684c494c163cd3c0b078ea8437ab4a42c8750244139e"} Oct 03 13:33:00 crc kubenswrapper[4861]: I1003 13:33:00.984772 
4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-46jp8" event={"ID":"dd1fcc3e-9164-4f36-8082-8458a06f6ce9","Type":"ContainerStarted","Data":"742d300c4a7b4e5b2e3f4739f091fef970a99493eba23e415db365ef88607a8b"} Oct 03 13:33:00 crc kubenswrapper[4861]: I1003 13:33:00.986488 4861 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-654v6 container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.7:8443/healthz\": dial tcp 10.217.0.7:8443: connect: connection refused" start-of-body= Oct 03 13:33:00 crc kubenswrapper[4861]: I1003 13:33:00.986525 4861 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-654v6" podUID="a6a84618-0674-4410-ab88-6896066b6077" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.7:8443/healthz\": dial tcp 10.217.0.7:8443: connect: connection refused" Oct 03 13:33:01 crc kubenswrapper[4861]: I1003 13:33:01.002319 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-6lkw8" podStartSLOduration=73.002300022 podStartE2EDuration="1m13.002300022s" podCreationTimestamp="2025-10-03 13:31:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:33:01.00144858 +0000 UTC m=+94.999433647" watchObservedRunningTime="2025-10-03 13:33:01.002300022 +0000 UTC m=+95.000285069" Oct 03 13:33:01 crc kubenswrapper[4861]: I1003 13:33:01.016786 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 13:33:01 crc kubenswrapper[4861]: E1003 13:33:01.018294 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 13:33:01.51827287 +0000 UTC m=+95.516257917 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:33:01 crc kubenswrapper[4861]: I1003 13:33:01.121038 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-t6vlm\" (UID: \"2ddecabf-87aa-4eda-93a5-eee5c61d3b91\") " pod="openshift-image-registry/image-registry-697d97f7c8-t6vlm" Oct 03 13:33:01 crc kubenswrapper[4861]: E1003 13:33:01.124666 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2025-10-03 13:33:01.62464474 +0000 UTC m=+95.622629787 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-t6vlm" (UID: "2ddecabf-87aa-4eda-93a5-eee5c61d3b91") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:33:01 crc kubenswrapper[4861]: I1003 13:33:01.222980 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 13:33:01 crc kubenswrapper[4861]: E1003 13:33:01.223679 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 13:33:01.723659952 +0000 UTC m=+95.721644999 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:33:01 crc kubenswrapper[4861]: I1003 13:33:01.264772 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-46jp8" Oct 03 13:33:01 crc kubenswrapper[4861]: I1003 13:33:01.276116 4861 patch_prober.go:28] interesting pod/router-default-5444994796-46jp8 container/router namespace/openshift-ingress: Startup probe status=failure output="Get \"http://localhost:1936/healthz/ready\": dial tcp [::1]:1936: connect: connection refused" start-of-body= Oct 03 13:33:01 crc kubenswrapper[4861]: I1003 13:33:01.276202 4861 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-46jp8" podUID="dd1fcc3e-9164-4f36-8082-8458a06f6ce9" containerName="router" probeResult="failure" output="Get \"http://localhost:1936/healthz/ready\": dial tcp [::1]:1936: connect: connection refused" Oct 03 13:33:01 crc kubenswrapper[4861]: I1003 13:33:01.324681 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-t6vlm\" (UID: \"2ddecabf-87aa-4eda-93a5-eee5c61d3b91\") " pod="openshift-image-registry/image-registry-697d97f7c8-t6vlm" Oct 03 13:33:01 crc kubenswrapper[4861]: E1003 13:33:01.325037 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 13:33:01.825017383 +0000 UTC m=+95.823002430 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-t6vlm" (UID: "2ddecabf-87aa-4eda-93a5-eee5c61d3b91") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:33:01 crc kubenswrapper[4861]: I1003 13:33:01.425980 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 13:33:01 crc kubenswrapper[4861]: E1003 13:33:01.426149 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 13:33:01.926130999 +0000 UTC m=+95.924116046 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:33:01 crc kubenswrapper[4861]: I1003 13:33:01.426473 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-t6vlm\" (UID: \"2ddecabf-87aa-4eda-93a5-eee5c61d3b91\") " pod="openshift-image-registry/image-registry-697d97f7c8-t6vlm" Oct 03 13:33:01 crc kubenswrapper[4861]: E1003 13:33:01.426891 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 13:33:01.926884338 +0000 UTC m=+95.924869385 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-t6vlm" (UID: "2ddecabf-87aa-4eda-93a5-eee5c61d3b91") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:33:01 crc kubenswrapper[4861]: I1003 13:33:01.528294 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 13:33:01 crc kubenswrapper[4861]: E1003 13:33:01.528536 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 13:33:02.028492646 +0000 UTC m=+96.026477693 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:33:01 crc kubenswrapper[4861]: I1003 13:33:01.528995 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-t6vlm\" (UID: \"2ddecabf-87aa-4eda-93a5-eee5c61d3b91\") " pod="openshift-image-registry/image-registry-697d97f7c8-t6vlm" Oct 03 13:33:01 crc kubenswrapper[4861]: E1003 13:33:01.529343 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 13:33:02.029325897 +0000 UTC m=+96.027310944 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-t6vlm" (UID: "2ddecabf-87aa-4eda-93a5-eee5c61d3b91") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:33:01 crc kubenswrapper[4861]: I1003 13:33:01.629782 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 13:33:01 crc kubenswrapper[4861]: E1003 13:33:01.630169 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 13:33:02.130155015 +0000 UTC m=+96.128140062 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:33:01 crc kubenswrapper[4861]: I1003 13:33:01.708066 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-46jp8" podStartSLOduration=73.708048187 podStartE2EDuration="1m13.708048187s" podCreationTimestamp="2025-10-03 13:31:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:33:01.028972324 +0000 UTC m=+95.026957381" watchObservedRunningTime="2025-10-03 13:33:01.708048187 +0000 UTC m=+95.706033234" Oct 03 13:33:01 crc kubenswrapper[4861]: I1003 13:33:01.709398 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Oct 03 13:33:01 crc kubenswrapper[4861]: I1003 13:33:01.731206 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-t6vlm\" (UID: \"2ddecabf-87aa-4eda-93a5-eee5c61d3b91\") " pod="openshift-image-registry/image-registry-697d97f7c8-t6vlm" Oct 03 13:33:01 crc kubenswrapper[4861]: E1003 13:33:01.732279 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 13:33:02.232260886 +0000 UTC m=+96.230246013 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-t6vlm" (UID: "2ddecabf-87aa-4eda-93a5-eee5c61d3b91") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:33:01 crc kubenswrapper[4861]: I1003 13:33:01.831980 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 13:33:01 crc kubenswrapper[4861]: E1003 13:33:01.832437 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 13:33:02.332421797 +0000 UTC m=+96.330406844 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:33:01 crc kubenswrapper[4861]: I1003 13:33:01.933932 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-t6vlm\" (UID: \"2ddecabf-87aa-4eda-93a5-eee5c61d3b91\") " pod="openshift-image-registry/image-registry-697d97f7c8-t6vlm" Oct 03 13:33:01 crc kubenswrapper[4861]: E1003 13:33:01.934265 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 13:33:02.43425321 +0000 UTC m=+96.432238257 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-t6vlm" (UID: "2ddecabf-87aa-4eda-93a5-eee5c61d3b91") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:33:01 crc kubenswrapper[4861]: I1003 13:33:01.990682 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-6gqgm" event={"ID":"16af6fa6-f789-48cd-8ec1-b84f169fe3a9","Type":"ContainerStarted","Data":"590471c761826c6cec857388b25750c31067090a6b917792319d9eba310462dc"} Oct 03 13:33:01 crc kubenswrapper[4861]: I1003 13:33:01.990735 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-6gqgm" event={"ID":"16af6fa6-f789-48cd-8ec1-b84f169fe3a9","Type":"ContainerStarted","Data":"538abb32698c4c304a7217896ac66b96e19fc127d71fdc40ae92d48c3bebd659"} Oct 03 13:33:01 crc kubenswrapper[4861]: I1003 13:33:01.992527 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-jkg6x" event={"ID":"6eb664e0-cd97-4267-a6bc-523316711be5","Type":"ContainerStarted","Data":"1ab1798391a73cfcb6e661e6485b5467117565687cf8f46af5544d39b2462037"} Oct 03 13:33:01 crc kubenswrapper[4861]: I1003 13:33:01.994063 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-5nfml" event={"ID":"5b4e4f2b-6854-4b02-9e1a-79aeff76d109","Type":"ContainerStarted","Data":"0b44e438ba3731e0bb2dd368bb511b681769c3b249baeeb6a28f08962b6c62b6"} Oct 03 13:33:01 crc kubenswrapper[4861]: I1003 13:33:01.994096 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-5nfml" event={"ID":"5b4e4f2b-6854-4b02-9e1a-79aeff76d109","Type":"ContainerStarted","Data":"88acefb5a253658e8b779565beadf59c25a6d780b11f5f8a123da8355e995256"} Oct 03 13:33:01 crc kubenswrapper[4861]: I1003 13:33:01.995805 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-fv562" event={"ID":"c805746f-1e3e-488f-abda-4584a3028187","Type":"ContainerStarted","Data":"fd0dfba7965f0acf62227ccd33cd982d22edefc6d457546d3276f3d8473a77e8"} Oct 03 13:33:01 crc kubenswrapper[4861]: I1003 13:33:01.998793 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-xv8s4" event={"ID":"fdb216bf-a9cf-4f81-95ee-0424fc4bdc89","Type":"ContainerStarted","Data":"57d3c5249df6882323924c5029b19e87f77757713781c3671eb64dd4a532dd2b"} Oct 03 13:33:02 crc kubenswrapper[4861]: I1003 13:33:02.000121 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-bj6mh" event={"ID":"f23bb6a0-acb7-4ada-96a2-73a978d75125","Type":"ContainerStarted","Data":"2544c81ddfd2d0d2c87e5532b8d5a5c2e3f4dd56967cbbc27ed75b5dde740b7d"} Oct 03 13:33:02 crc kubenswrapper[4861]: I1003 13:33:02.001804 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-lzfdk" event={"ID":"5aaec6a4-8da9-4ac8-a792-131254ad8e23","Type":"ContainerStarted","Data":"3ebb1873e3afa9033edf2dabcb31f75ec78a507db9c61cdacc4164f0c9671154"} Oct 03 13:33:02 crc kubenswrapper[4861]: I1003 13:33:02.003094 4861 kubelet.go:2453] "SyncLoop (PLEG): event for 
pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rjkcl" event={"ID":"c4da994b-a677-44ec-a87a-5ae4fdd5e943","Type":"ContainerStarted","Data":"114ec03789f654d1ba1b8086115cd15edeb826687ebdd34ebfe6759578f9ba86"} Oct 03 13:33:02 crc kubenswrapper[4861]: I1003 13:33:02.003255 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rjkcl" Oct 03 13:33:02 crc kubenswrapper[4861]: I1003 13:33:02.003776 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29324970-q4nf6" event={"ID":"0c3c7089-d98e-4504-91c2-27851ed21d16","Type":"ContainerStarted","Data":"907a8d3b740c70a318d513e8f58fe3218eb1412cf271da766aee70577798d5e6"} Oct 03 13:33:02 crc kubenswrapper[4861]: I1003 13:33:02.004910 4861 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-rjkcl container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.29:8443/healthz\": dial tcp 10.217.0.29:8443: connect: connection refused" start-of-body= Oct 03 13:33:02 crc kubenswrapper[4861]: I1003 13:33:02.004943 4861 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rjkcl" podUID="c4da994b-a677-44ec-a87a-5ae4fdd5e943" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.29:8443/healthz\": dial tcp 10.217.0.29:8443: connect: connection refused" Oct 03 13:33:02 crc kubenswrapper[4861]: I1003 13:33:02.006038 4861 generic.go:334] "Generic (PLEG): container finished" podID="41c5f641-6c5c-4b3b-89d7-2a265cab36ca" containerID="a4e47f3f14aa9639751bdea7994d8e4b95e74f7676b9560fb4300d0095a6ef54" exitCode=0 Oct 03 13:33:02 crc kubenswrapper[4861]: I1003 13:33:02.006104 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8kth8" event={"ID":"41c5f641-6c5c-4b3b-89d7-2a265cab36ca","Type":"ContainerDied","Data":"a4e47f3f14aa9639751bdea7994d8e4b95e74f7676b9560fb4300d0095a6ef54"} Oct 03 13:33:02 crc kubenswrapper[4861]: I1003 13:33:02.008096 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-c7d78" event={"ID":"b546d058-2a13-4ed4-ad3b-061d9870f9ec","Type":"ContainerStarted","Data":"ac6dbf57fef7b3929e3dbed4c8438ea3a636e672d483c9dcb7b47736c586b5f9"} Oct 03 13:33:02 crc kubenswrapper[4861]: I1003 13:33:02.011317 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-nzdn9" event={"ID":"5e6df402-f20e-4982-810e-5664640bb0ea","Type":"ContainerStarted","Data":"74285a5fc7f575d0382a48c6fb1fe4657d61c02d4954b8fc206708740afbae44"} Oct 03 13:33:02 crc kubenswrapper[4861]: I1003 13:33:02.011344 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-nzdn9" event={"ID":"5e6df402-f20e-4982-810e-5664640bb0ea","Type":"ContainerStarted","Data":"029cdf8a6ff58d7a9cd0bce36a6cdb5693f895e76142b488be428c4cbf9d9b64"} Oct 03 13:33:02 crc kubenswrapper[4861]: I1003 13:33:02.012477 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-5p9tt" event={"ID":"a7c2774d-2d73-439d-94d7-6c184b05bf91","Type":"ContainerStarted","Data":"978879bd331e3c07dc2891c66ce7e37db100e83cf8b4aca83da73398fd68a16d"} Oct 03 13:33:02 crc kubenswrapper[4861]: I1003 
13:33:02.013673 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-t47dt" event={"ID":"217fa116-2a2c-4c70-a13c-370fd7c2ffd7","Type":"ContainerStarted","Data":"b35e6665a3c8cb0baa574eeb2092ca2e38de538ccb714ad5c083b8473e04366d"} Oct 03 13:33:02 crc kubenswrapper[4861]: I1003 13:33:02.013696 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-t47dt" event={"ID":"217fa116-2a2c-4c70-a13c-370fd7c2ffd7","Type":"ContainerStarted","Data":"dee138f6f0904ec5723f597816ba064f2e658686462d894d1dadfc54566ce8a6"} Oct 03 13:33:02 crc kubenswrapper[4861]: I1003 13:33:02.014278 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-zpskq" event={"ID":"eb006550-0648-416e-8bb0-48a5baa22a11","Type":"ContainerStarted","Data":"baa3839f5e25590762682af3862e0091e73cb4128efbb7490e9493f2c5a8ea47"} Oct 03 13:33:02 crc kubenswrapper[4861]: I1003 13:33:02.015276 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-xxf9s" event={"ID":"5aad7266-a7c4-45fb-bbed-e1a1277681ef","Type":"ContainerStarted","Data":"7dd9607e69f1ee8bb7243926812b9519602d5c82a1723f62c99098d79e9abfac"} Oct 03 13:33:02 crc kubenswrapper[4861]: I1003 13:33:02.016289 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-s84lp" event={"ID":"b430fcf9-3c59-4aa8-b554-9e85cf2fcb40","Type":"ContainerStarted","Data":"b423df4f309c6ec45873a9c19b871952b2967a41b767aa4a0abadff2a9251db5"} Oct 03 13:33:02 crc kubenswrapper[4861]: I1003 13:33:02.016976 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-xt2zh" event={"ID":"e98a1486-4c73-4895-923a-dc3b4dcead56","Type":"ContainerStarted","Data":"e915de8fcd66d3bb95f386c5513344f490870dff7bd71120bbf9f302f1e39ba8"} Oct 03 13:33:02 crc kubenswrapper[4861]: I1003 13:33:02.017832 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-fcq84" event={"ID":"876d6207-8976-4d02-887b-b431a4821eab","Type":"ContainerStarted","Data":"8d36b9ae74f06f543bebd517bc77ece72bd80cb2030e455352aa3b459ae413ab"} Oct 03 13:33:02 crc kubenswrapper[4861]: I1003 13:33:02.019010 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-k46fg" event={"ID":"5b53b21d-af9a-4c3a-9b32-30cc1de6cea1","Type":"ContainerStarted","Data":"8ff5e6d347ea27a8855e267ec2c1a38b8c34ebec0380fa5a0b679b8b6d3208f6"} Oct 03 13:33:02 crc kubenswrapper[4861]: I1003 13:33:02.020274 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-rc9jz" event={"ID":"b35950ee-9000-4269-a58d-f1d2c4563f05","Type":"ContainerStarted","Data":"aaf8882246cdc1e1c340073f13a49fd3978f0b0764f89a376fbdee5b8edb1289"} Oct 03 13:33:02 crc kubenswrapper[4861]: I1003 13:33:02.021406 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-rc9jz" Oct 03 13:33:02 crc kubenswrapper[4861]: I1003 13:33:02.021679 4861 patch_prober.go:28] interesting pod/downloads-7954f5f757-rc9jz container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.30:8080/\": dial tcp 10.217.0.30:8080: connect: connection refused" start-of-body= Oct 03 
13:33:02 crc kubenswrapper[4861]: I1003 13:33:02.021726 4861 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-rc9jz" podUID="b35950ee-9000-4269-a58d-f1d2c4563f05" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.30:8080/\": dial tcp 10.217.0.30:8080: connect: connection refused" Oct 03 13:33:02 crc kubenswrapper[4861]: I1003 13:33:02.023403 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-8tkv7" event={"ID":"3d27eb32-1a56-4d8c-9504-4b7fa8261df6","Type":"ContainerStarted","Data":"ea4dcd048bc9abe47693526e5b3c653049770cdac4839e85243239f8ff72b87b"} Oct 03 13:33:02 crc kubenswrapper[4861]: I1003 13:33:02.025422 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-rg4gb" event={"ID":"04154cd6-a67c-42d3-bbb0-951c4986390d","Type":"ContainerStarted","Data":"43ba0d66c3276739a388af1879551a7ab5d294d5e2f04cfbba72050e5b851379"} Oct 03 13:33:02 crc kubenswrapper[4861]: I1003 13:33:02.025458 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-rg4gb" event={"ID":"04154cd6-a67c-42d3-bbb0-951c4986390d","Type":"ContainerStarted","Data":"21d57d1b1c75ff40aa8abaacfa9d2a4a3df5a532df63405de7a784ab8d53df62"} Oct 03 13:33:02 crc kubenswrapper[4861]: I1003 13:33:02.028613 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-rkmmm" event={"ID":"802caeaa-43a4-4cca-b946-5b561df185a8","Type":"ContainerStarted","Data":"42e5037f3765e4ec60b453a9d4e4a083c1cd9a3c1538aedee1b3b4ffa9de1474"} Oct 03 13:33:02 crc kubenswrapper[4861]: I1003 13:33:02.030354 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-kgpdn" event={"ID":"88db1fac-4c48-400a-9eee-f5c11d8dd12e","Type":"ContainerStarted","Data":"0e793763ee249d9290208796652dfc9af7063ef1f14b460b6af28dde4ff28670"} Oct 03 13:33:02 crc kubenswrapper[4861]: I1003 13:33:02.031743 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-584sd" event={"ID":"e0993744-e39f-4dbf-a726-7580528ca825","Type":"ContainerStarted","Data":"21b668b43f9cde6876b17c59fc3df05fa9cef774082dfb3e13dc58e619b8f9d4"} Oct 03 13:33:02 crc kubenswrapper[4861]: I1003 13:33:02.035087 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 13:33:02 crc kubenswrapper[4861]: E1003 13:33:02.035224 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 13:33:02.535205393 +0000 UTC m=+96.533190440 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:33:02 crc kubenswrapper[4861]: I1003 13:33:02.035276 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=1.035258644 podStartE2EDuration="1.035258644s" podCreationTimestamp="2025-10-03 13:33:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:33:02.032621316 +0000 UTC m=+96.030606373" watchObservedRunningTime="2025-10-03 13:33:02.035258644 +0000 UTC m=+96.033243691" Oct 03 13:33:02 crc kubenswrapper[4861]: I1003 13:33:02.035503 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-t6vlm\" (UID: \"2ddecabf-87aa-4eda-93a5-eee5c61d3b91\") " pod="openshift-image-registry/image-registry-697d97f7c8-t6vlm" Oct 03 13:33:02 crc kubenswrapper[4861]: E1003 13:33:02.037062 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 13:33:02.537050159 +0000 UTC m=+96.535035206 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-t6vlm" (UID: "2ddecabf-87aa-4eda-93a5-eee5c61d3b91") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:33:02 crc kubenswrapper[4861]: I1003 13:33:02.038079 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-4znl7" event={"ID":"f279daa8-7a4e-405d-b7af-499a2179001f","Type":"ContainerStarted","Data":"bb37a564355d3320617c56dc9b30fc7ee635c7873514bb9adeb93a154579c04d"} Oct 03 13:33:02 crc kubenswrapper[4861]: I1003 13:33:02.038309 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-4znl7" Oct 03 13:33:02 crc kubenswrapper[4861]: I1003 13:33:02.049166 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-bbq9v" event={"ID":"3bf6ab88-b524-4a66-9fff-f873646d11d9","Type":"ContainerStarted","Data":"334c7862a214e1abbf71f1de2ce878081a1c9712d0c69e01a3da2e99f9af0904"} Oct 03 13:33:02 crc kubenswrapper[4861]: I1003 13:33:02.051052 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-f9q2n" event={"ID":"39d336a2-fdb0-4500-a751-b771672ae2bd","Type":"ContainerStarted","Data":"9f387748a1f70cb43c699121065ced81e5a670a3defbcaddf830545a594e5dcb"} Oct 03 13:33:02 crc kubenswrapper[4861]: I1003 13:33:02.052540 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-zjlzh" event={"ID":"51bf4935-7bd2-4059-9869-60fc8db46d82","Type":"ContainerStarted","Data":"56adc43a0e9a774418968225291c43eff972fe9c9a2ef5300793ab9dec8650cf"} Oct 03 13:33:02 crc kubenswrapper[4861]: I1003 13:33:02.063488 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-fv562" podStartSLOduration=74.063473715 podStartE2EDuration="1m14.063473715s" podCreationTimestamp="2025-10-03 13:31:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:33:02.062582222 +0000 UTC m=+96.060567279" watchObservedRunningTime="2025-10-03 13:33:02.063473715 +0000 UTC m=+96.061458762" Oct 03 13:33:02 crc kubenswrapper[4861]: I1003 13:33:02.136535 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 13:33:02 crc kubenswrapper[4861]: E1003 13:33:02.136670 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 13:33:02.636639916 +0000 UTC m=+96.634624963 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:33:02 crc kubenswrapper[4861]: I1003 13:33:02.136780 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-t6vlm\" (UID: \"2ddecabf-87aa-4eda-93a5-eee5c61d3b91\") " pod="openshift-image-registry/image-registry-697d97f7c8-t6vlm" Oct 03 13:33:02 crc kubenswrapper[4861]: E1003 13:33:02.138595 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 13:33:02.638583925 +0000 UTC m=+96.636568972 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-t6vlm" (UID: "2ddecabf-87aa-4eda-93a5-eee5c61d3b91") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:33:02 crc kubenswrapper[4861]: I1003 13:33:02.191705 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-rc9jz" podStartSLOduration=74.191683203 podStartE2EDuration="1m14.191683203s" podCreationTimestamp="2025-10-03 13:31:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:33:02.19000948 +0000 UTC m=+96.187994527" watchObservedRunningTime="2025-10-03 13:33:02.191683203 +0000 UTC m=+96.189668420" Oct 03 13:33:02 crc kubenswrapper[4861]: I1003 13:33:02.209148 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-5p9tt" podStartSLOduration=74.209129959 podStartE2EDuration="1m14.209129959s" podCreationTimestamp="2025-10-03 13:31:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:33:02.208879493 +0000 UTC m=+96.206864540" watchObservedRunningTime="2025-10-03 13:33:02.209129959 +0000 UTC m=+96.207114996" Oct 03 13:33:02 crc kubenswrapper[4861]: I1003 13:33:02.240303 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 13:33:02 crc kubenswrapper[4861]: E1003 13:33:02.240649 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 
podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 13:33:02.740635714 +0000 UTC m=+96.738620761 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:33:02 crc kubenswrapper[4861]: I1003 13:33:02.255770 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rjkcl" podStartSLOduration=74.255751911 podStartE2EDuration="1m14.255751911s" podCreationTimestamp="2025-10-03 13:31:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:33:02.239715851 +0000 UTC m=+96.237700898" watchObservedRunningTime="2025-10-03 13:33:02.255751911 +0000 UTC m=+96.253736958" Oct 03 13:33:02 crc kubenswrapper[4861]: I1003 13:33:02.257750 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-8tkv7" podStartSLOduration=74.257743032 podStartE2EDuration="1m14.257743032s" podCreationTimestamp="2025-10-03 13:31:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:33:02.255624848 +0000 UTC m=+96.253609895" watchObservedRunningTime="2025-10-03 13:33:02.257743032 +0000 UTC m=+96.255728079" Oct 03 13:33:02 crc kubenswrapper[4861]: I1003 13:33:02.260399 4861 patch_prober.go:28] interesting pod/router-default-5444994796-46jp8 container/router namespace/openshift-ingress: Startup probe status=failure output="Get \"http://localhost:1936/healthz/ready\": dial tcp [::1]:1936: connect: connection refused" start-of-body= Oct 03 13:33:02 crc kubenswrapper[4861]: I1003 13:33:02.260478 4861 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-46jp8" podUID="dd1fcc3e-9164-4f36-8082-8458a06f6ce9" containerName="router" probeResult="failure" output="Get \"http://localhost:1936/healthz/ready\": dial tcp [::1]:1936: connect: connection refused" Oct 03 13:33:02 crc kubenswrapper[4861]: I1003 13:33:02.271797 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-xxf9s" podStartSLOduration=74.271782761 podStartE2EDuration="1m14.271782761s" podCreationTimestamp="2025-10-03 13:31:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:33:02.270470708 +0000 UTC m=+96.268455755" watchObservedRunningTime="2025-10-03 13:33:02.271782761 +0000 UTC m=+96.269767808" Oct 03 13:33:02 crc kubenswrapper[4861]: I1003 13:33:02.291616 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-fcq84" podStartSLOduration=74.291600828 podStartE2EDuration="1m14.291600828s" podCreationTimestamp="2025-10-03 13:31:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" 
lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:33:02.29133197 +0000 UTC m=+96.289317017" watchObservedRunningTime="2025-10-03 13:33:02.291600828 +0000 UTC m=+96.289585875" Oct 03 13:33:02 crc kubenswrapper[4861]: I1003 13:33:02.317718 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-4znl7" podStartSLOduration=74.317700765 podStartE2EDuration="1m14.317700765s" podCreationTimestamp="2025-10-03 13:31:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:33:02.315183311 +0000 UTC m=+96.313168368" watchObservedRunningTime="2025-10-03 13:33:02.317700765 +0000 UTC m=+96.315685832" Oct 03 13:33:02 crc kubenswrapper[4861]: I1003 13:33:02.341924 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-t6vlm\" (UID: \"2ddecabf-87aa-4eda-93a5-eee5c61d3b91\") " pod="openshift-image-registry/image-registry-697d97f7c8-t6vlm" Oct 03 13:33:02 crc kubenswrapper[4861]: E1003 13:33:02.342332 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 13:33:02.842318694 +0000 UTC m=+96.840303741 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-t6vlm" (UID: "2ddecabf-87aa-4eda-93a5-eee5c61d3b91") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:33:02 crc kubenswrapper[4861]: I1003 13:33:02.443175 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 13:33:02 crc kubenswrapper[4861]: E1003 13:33:02.443357 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 13:33:02.943335827 +0000 UTC m=+96.941320884 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:33:02 crc kubenswrapper[4861]: I1003 13:33:02.443492 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-t6vlm\" (UID: \"2ddecabf-87aa-4eda-93a5-eee5c61d3b91\") " pod="openshift-image-registry/image-registry-697d97f7c8-t6vlm" Oct 03 13:33:02 crc kubenswrapper[4861]: E1003 13:33:02.443837 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 13:33:02.943827809 +0000 UTC m=+96.941812856 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-t6vlm" (UID: "2ddecabf-87aa-4eda-93a5-eee5c61d3b91") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:33:02 crc kubenswrapper[4861]: I1003 13:33:02.545087 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 13:33:02 crc kubenswrapper[4861]: E1003 13:33:02.545350 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 13:33:03.045323485 +0000 UTC m=+97.043308522 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:33:02 crc kubenswrapper[4861]: I1003 13:33:02.647287 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-t6vlm\" (UID: \"2ddecabf-87aa-4eda-93a5-eee5c61d3b91\") " pod="openshift-image-registry/image-registry-697d97f7c8-t6vlm" Oct 03 13:33:02 crc kubenswrapper[4861]: E1003 13:33:02.647603 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 13:33:03.1475887 +0000 UTC m=+97.145573747 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-t6vlm" (UID: "2ddecabf-87aa-4eda-93a5-eee5c61d3b91") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:33:02 crc kubenswrapper[4861]: I1003 13:33:02.748001 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 13:33:02 crc kubenswrapper[4861]: E1003 13:33:02.748399 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 13:33:03.248385117 +0000 UTC m=+97.246370164 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:33:02 crc kubenswrapper[4861]: I1003 13:33:02.850152 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-t6vlm\" (UID: \"2ddecabf-87aa-4eda-93a5-eee5c61d3b91\") " pod="openshift-image-registry/image-registry-697d97f7c8-t6vlm" Oct 03 13:33:02 crc kubenswrapper[4861]: E1003 13:33:02.850499 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 13:33:03.350483537 +0000 UTC m=+97.348468584 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-t6vlm" (UID: "2ddecabf-87aa-4eda-93a5-eee5c61d3b91") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:33:02 crc kubenswrapper[4861]: I1003 13:33:02.951221 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 13:33:02 crc kubenswrapper[4861]: E1003 13:33:02.951430 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 13:33:03.451396512 +0000 UTC m=+97.449381559 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:33:02 crc kubenswrapper[4861]: I1003 13:33:02.951514 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-t6vlm\" (UID: \"2ddecabf-87aa-4eda-93a5-eee5c61d3b91\") " pod="openshift-image-registry/image-registry-697d97f7c8-t6vlm" Oct 03 13:33:02 crc kubenswrapper[4861]: E1003 13:33:02.951809 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 13:33:03.451800282 +0000 UTC m=+97.449785329 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-t6vlm" (UID: "2ddecabf-87aa-4eda-93a5-eee5c61d3b91") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:33:03 crc kubenswrapper[4861]: I1003 13:33:03.052656 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 13:33:03 crc kubenswrapper[4861]: E1003 13:33:03.052816 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 13:33:03.552790294 +0000 UTC m=+97.550775341 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:33:03 crc kubenswrapper[4861]: I1003 13:33:03.052959 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-t6vlm\" (UID: \"2ddecabf-87aa-4eda-93a5-eee5c61d3b91\") " pod="openshift-image-registry/image-registry-697d97f7c8-t6vlm" Oct 03 13:33:03 crc kubenswrapper[4861]: E1003 13:33:03.053312 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 13:33:03.553301947 +0000 UTC m=+97.551287064 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-t6vlm" (UID: "2ddecabf-87aa-4eda-93a5-eee5c61d3b91") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:33:03 crc kubenswrapper[4861]: I1003 13:33:03.060205 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-jkg6x" event={"ID":"6eb664e0-cd97-4267-a6bc-523316711be5","Type":"ContainerStarted","Data":"6c1bec0ac07f01510be212bbe780199fbf4003fa8cd9f47b6a3a5916237e2ee1"} Oct 03 13:33:03 crc kubenswrapper[4861]: I1003 13:33:03.062471 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29324970-q4nf6" event={"ID":"0c3c7089-d98e-4504-91c2-27851ed21d16","Type":"ContainerStarted","Data":"b9ea19e2e924a668a8c9fd3450ddc14d94b6a5f9c680b84da2a77ad9e4691963"} Oct 03 13:33:03 crc kubenswrapper[4861]: I1003 13:33:03.063665 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-bj6mh" event={"ID":"f23bb6a0-acb7-4ada-96a2-73a978d75125","Type":"ContainerStarted","Data":"930ee4d553236fa442f10c4236d0fd2ee0ac0a7bb4c6c033a3b3c47d11a58589"} Oct 03 13:33:03 crc kubenswrapper[4861]: I1003 13:33:03.065123 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-lzfdk" event={"ID":"5aaec6a4-8da9-4ac8-a792-131254ad8e23","Type":"ContainerStarted","Data":"8de3018c48f436fbf4b18995c4be2cc6e26f1f68d920a37266acac35914b42b4"} Oct 03 13:33:03 crc kubenswrapper[4861]: I1003 13:33:03.067517 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-zjlzh" event={"ID":"51bf4935-7bd2-4059-9869-60fc8db46d82","Type":"ContainerStarted","Data":"081c6d3d575868e1ae6650851a26c8f02d41a10d09feeefe3520caa73fb3f49b"} Oct 03 13:33:03 crc kubenswrapper[4861]: I1003 13:33:03.074605 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-service-ca-operator/service-ca-operator-777779d784-c7d78" event={"ID":"b546d058-2a13-4ed4-ad3b-061d9870f9ec","Type":"ContainerStarted","Data":"87c4b81e0691a578e70c145aade499828aab8e4b30e7b8c9da80525576ebe1a3"} Oct 03 13:33:03 crc kubenswrapper[4861]: I1003 13:33:03.080246 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-rkmmm" event={"ID":"802caeaa-43a4-4cca-b946-5b561df185a8","Type":"ContainerStarted","Data":"9dcc1e1189c790d230019d2ef811e4127399649c38adaab4040468ffed7a872d"} Oct 03 13:33:03 crc kubenswrapper[4861]: I1003 13:33:03.085637 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8kth8" event={"ID":"41c5f641-6c5c-4b3b-89d7-2a265cab36ca","Type":"ContainerStarted","Data":"918aa8dd82f87479772a1665134487c4cd478bf331359887da4499d83aa83ba0"} Oct 03 13:33:03 crc kubenswrapper[4861]: I1003 13:33:03.091464 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-bbq9v" event={"ID":"3bf6ab88-b524-4a66-9fff-f873646d11d9","Type":"ContainerStarted","Data":"2ec2b6b5e0e55fa7f50d0cedafcd87c395db7d8478a133f642f6364928e75567"} Oct 03 13:33:03 crc kubenswrapper[4861]: I1003 13:33:03.104241 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-xt2zh" event={"ID":"e98a1486-4c73-4895-923a-dc3b4dcead56","Type":"ContainerStarted","Data":"044ddf1fb8ce489c659df3c76cc257954a9e7d3caf06e8b47ba247d48b07e578"} Oct 03 13:33:03 crc kubenswrapper[4861]: I1003 13:33:03.104995 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-xt2zh" Oct 03 13:33:03 crc kubenswrapper[4861]: I1003 13:33:03.113766 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-zpskq" event={"ID":"eb006550-0648-416e-8bb0-48a5baa22a11","Type":"ContainerStarted","Data":"90f49330c28c72a7fe637f9351490c00dd81558ec4d21749e6d0c27c662ba3cd"} Oct 03 13:33:03 crc kubenswrapper[4861]: I1003 13:33:03.115326 4861 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-xt2zh container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.21:8443/healthz\": dial tcp 10.217.0.21:8443: connect: connection refused" start-of-body= Oct 03 13:33:03 crc kubenswrapper[4861]: I1003 13:33:03.115358 4861 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-xt2zh" podUID="e98a1486-4c73-4895-923a-dc3b4dcead56" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.21:8443/healthz\": dial tcp 10.217.0.21:8443: connect: connection refused" Oct 03 13:33:03 crc kubenswrapper[4861]: I1003 13:33:03.119101 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-k46fg" event={"ID":"5b53b21d-af9a-4c3a-9b32-30cc1de6cea1","Type":"ContainerStarted","Data":"89a85275a3f5424e7dd07c66577fcada2df8890608540783ef3d27e4513c5db2"} Oct 03 13:33:03 crc kubenswrapper[4861]: I1003 13:33:03.129457 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-c7d78" podStartSLOduration=75.129437355 podStartE2EDuration="1m15.129437355s" podCreationTimestamp="2025-10-03 13:31:48 +0000 UTC" 
firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:33:03.127778543 +0000 UTC m=+97.125763590" watchObservedRunningTime="2025-10-03 13:33:03.129437355 +0000 UTC m=+97.127422402" Oct 03 13:33:03 crc kubenswrapper[4861]: I1003 13:33:03.141713 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-s84lp" event={"ID":"b430fcf9-3c59-4aa8-b554-9e85cf2fcb40","Type":"ContainerStarted","Data":"9a08acc2f30d552fa7c3a69749a3a6a0d5ebd2210f1d8e4f353da2a27aa5f705"} Oct 03 13:33:03 crc kubenswrapper[4861]: I1003 13:33:03.155716 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 13:33:03 crc kubenswrapper[4861]: E1003 13:33:03.156033 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 13:33:03.655994012 +0000 UTC m=+97.653979059 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:33:03 crc kubenswrapper[4861]: I1003 13:33:03.156239 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-t6vlm\" (UID: \"2ddecabf-87aa-4eda-93a5-eee5c61d3b91\") " pod="openshift-image-registry/image-registry-697d97f7c8-t6vlm" Oct 03 13:33:03 crc kubenswrapper[4861]: E1003 13:33:03.157612 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 13:33:03.657602092 +0000 UTC m=+97.655587139 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-t6vlm" (UID: "2ddecabf-87aa-4eda-93a5-eee5c61d3b91") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:33:03 crc kubenswrapper[4861]: I1003 13:33:03.157997 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-bbq9v" podStartSLOduration=75.157979442 podStartE2EDuration="1m15.157979442s" podCreationTimestamp="2025-10-03 13:31:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:33:03.153997991 +0000 UTC m=+97.151983038" watchObservedRunningTime="2025-10-03 13:33:03.157979442 +0000 UTC m=+97.155964489" Oct 03 13:33:03 crc kubenswrapper[4861]: I1003 13:33:03.163607 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-584sd" event={"ID":"e0993744-e39f-4dbf-a726-7580528ca825","Type":"ContainerStarted","Data":"d5a3817e4193a1b1af220880b6bc4bf943d88f99332349fb4e9b6842023d18e5"} Oct 03 13:33:03 crc kubenswrapper[4861]: I1003 13:33:03.165921 4861 patch_prober.go:28] interesting pod/downloads-7954f5f757-rc9jz container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.30:8080/\": dial tcp 10.217.0.30:8080: connect: connection refused" start-of-body= Oct 03 13:33:03 crc kubenswrapper[4861]: I1003 13:33:03.165972 4861 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-rc9jz" podUID="b35950ee-9000-4269-a58d-f1d2c4563f05" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.30:8080/\": dial tcp 10.217.0.30:8080: connect: connection refused" Oct 03 13:33:03 crc kubenswrapper[4861]: I1003 13:33:03.166591 4861 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-rjkcl container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.29:8443/healthz\": dial tcp 10.217.0.29:8443: connect: connection refused" start-of-body= Oct 03 13:33:03 crc kubenswrapper[4861]: I1003 13:33:03.166613 4861 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rjkcl" podUID="c4da994b-a677-44ec-a87a-5ae4fdd5e943" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.29:8443/healthz\": dial tcp 10.217.0.29:8443: connect: connection refused" Oct 03 13:33:03 crc kubenswrapper[4861]: I1003 13:33:03.166860 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-nzdn9" Oct 03 13:33:03 crc kubenswrapper[4861]: I1003 13:33:03.167401 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-xv8s4" Oct 03 13:33:03 crc kubenswrapper[4861]: I1003 13:33:03.171098 4861 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-nzdn9 container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get 
\"https://10.217.0.38:5443/healthz\": dial tcp 10.217.0.38:5443: connect: connection refused" start-of-body= Oct 03 13:33:03 crc kubenswrapper[4861]: I1003 13:33:03.171132 4861 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-nzdn9" podUID="5e6df402-f20e-4982-810e-5664640bb0ea" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.38:5443/healthz\": dial tcp 10.217.0.38:5443: connect: connection refused" Oct 03 13:33:03 crc kubenswrapper[4861]: I1003 13:33:03.172042 4861 patch_prober.go:28] interesting pod/openshift-config-operator-7777fb866f-4znl7 container/openshift-config-operator namespace/openshift-config-operator: Readiness probe status=failure output="Get \"https://10.217.0.20:8443/healthz\": dial tcp 10.217.0.20:8443: connect: connection refused" start-of-body= Oct 03 13:33:03 crc kubenswrapper[4861]: I1003 13:33:03.172063 4861 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-config-operator/openshift-config-operator-7777fb866f-4znl7" podUID="f279daa8-7a4e-405d-b7af-499a2179001f" containerName="openshift-config-operator" probeResult="failure" output="Get \"https://10.217.0.20:8443/healthz\": dial tcp 10.217.0.20:8443: connect: connection refused" Oct 03 13:33:03 crc kubenswrapper[4861]: I1003 13:33:03.172114 4861 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-xv8s4 container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.22:6443/healthz\": dial tcp 10.217.0.22:6443: connect: connection refused" start-of-body= Oct 03 13:33:03 crc kubenswrapper[4861]: I1003 13:33:03.172130 4861 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-xv8s4" podUID="fdb216bf-a9cf-4f81-95ee-0424fc4bdc89" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.22:6443/healthz\": dial tcp 10.217.0.22:6443: connect: connection refused" Oct 03 13:33:03 crc kubenswrapper[4861]: I1003 13:33:03.259192 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 13:33:03 crc kubenswrapper[4861]: E1003 13:33:03.263602 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 13:33:03.763576741 +0000 UTC m=+97.761561788 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:33:03 crc kubenswrapper[4861]: I1003 13:33:03.265473 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-5nfml" podStartSLOduration=8.265450939 podStartE2EDuration="8.265450939s" podCreationTimestamp="2025-10-03 13:32:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:33:03.255407513 +0000 UTC m=+97.253392570" watchObservedRunningTime="2025-10-03 13:33:03.265450939 +0000 UTC m=+97.263435996" Oct 03 13:33:03 crc kubenswrapper[4861]: I1003 13:33:03.270331 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-xt2zh" podStartSLOduration=75.270313023 podStartE2EDuration="1m15.270313023s" podCreationTimestamp="2025-10-03 13:31:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:33:03.201397817 +0000 UTC m=+97.199382874" watchObservedRunningTime="2025-10-03 13:33:03.270313023 +0000 UTC m=+97.268298070" Oct 03 13:33:03 crc kubenswrapper[4861]: I1003 13:33:03.288836 4861 patch_prober.go:28] interesting pod/router-default-5444994796-46jp8 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 03 13:33:03 crc kubenswrapper[4861]: [-]has-synced failed: reason withheld Oct 03 13:33:03 crc kubenswrapper[4861]: [+]process-running ok Oct 03 13:33:03 crc kubenswrapper[4861]: healthz check failed Oct 03 13:33:03 crc kubenswrapper[4861]: I1003 13:33:03.288942 4861 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-46jp8" podUID="dd1fcc3e-9164-4f36-8082-8458a06f6ce9" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 03 13:33:03 crc kubenswrapper[4861]: I1003 13:33:03.333128 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-nzdn9" podStartSLOduration=75.333111332 podStartE2EDuration="1m15.333111332s" podCreationTimestamp="2025-10-03 13:31:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:33:03.331466999 +0000 UTC m=+97.329452046" watchObservedRunningTime="2025-10-03 13:33:03.333111332 +0000 UTC m=+97.331096389" Oct 03 13:33:03 crc kubenswrapper[4861]: I1003 13:33:03.358618 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-f9q2n" podStartSLOduration=75.358597861 podStartE2EDuration="1m15.358597861s" podCreationTimestamp="2025-10-03 13:31:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:33:03.356652091 +0000 
UTC m=+97.354637138" watchObservedRunningTime="2025-10-03 13:33:03.358597861 +0000 UTC m=+97.356582908" Oct 03 13:33:03 crc kubenswrapper[4861]: I1003 13:33:03.361890 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-t6vlm\" (UID: \"2ddecabf-87aa-4eda-93a5-eee5c61d3b91\") " pod="openshift-image-registry/image-registry-697d97f7c8-t6vlm" Oct 03 13:33:03 crc kubenswrapper[4861]: E1003 13:33:03.362142 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 13:33:03.86213146 +0000 UTC m=+97.860116507 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-t6vlm" (UID: "2ddecabf-87aa-4eda-93a5-eee5c61d3b91") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:33:03 crc kubenswrapper[4861]: I1003 13:33:03.390449 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-rg4gb" podStartSLOduration=75.390433171 podStartE2EDuration="1m15.390433171s" podCreationTimestamp="2025-10-03 13:31:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:33:03.388555923 +0000 UTC m=+97.386540970" watchObservedRunningTime="2025-10-03 13:33:03.390433171 +0000 UTC m=+97.388418218" Oct 03 13:33:03 crc kubenswrapper[4861]: I1003 13:33:03.434045 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-xv8s4" podStartSLOduration=75.434030572 podStartE2EDuration="1m15.434030572s" podCreationTimestamp="2025-10-03 13:31:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:33:03.427223238 +0000 UTC m=+97.425208285" watchObservedRunningTime="2025-10-03 13:33:03.434030572 +0000 UTC m=+97.432015629" Oct 03 13:33:03 crc kubenswrapper[4861]: I1003 13:33:03.452003 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-584sd" podStartSLOduration=75.451976648 podStartE2EDuration="1m15.451976648s" podCreationTimestamp="2025-10-03 13:31:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:33:03.451366723 +0000 UTC m=+97.449351780" watchObservedRunningTime="2025-10-03 13:33:03.451976648 +0000 UTC m=+97.449961695" Oct 03 13:33:03 crc kubenswrapper[4861]: I1003 13:33:03.462356 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: 
\"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 13:33:03 crc kubenswrapper[4861]: E1003 13:33:03.462545 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 13:33:03.962513387 +0000 UTC m=+97.960498434 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:33:03 crc kubenswrapper[4861]: I1003 13:33:03.462673 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-t6vlm\" (UID: \"2ddecabf-87aa-4eda-93a5-eee5c61d3b91\") " pod="openshift-image-registry/image-registry-697d97f7c8-t6vlm" Oct 03 13:33:03 crc kubenswrapper[4861]: E1003 13:33:03.463007 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 13:33:03.962995838 +0000 UTC m=+97.960980885 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-t6vlm" (UID: "2ddecabf-87aa-4eda-93a5-eee5c61d3b91") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:33:03 crc kubenswrapper[4861]: I1003 13:33:03.564161 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 13:33:03 crc kubenswrapper[4861]: E1003 13:33:03.564380 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 13:33:04.06436345 +0000 UTC m=+98.062348497 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:33:03 crc kubenswrapper[4861]: I1003 13:33:03.564463 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-t6vlm\" (UID: \"2ddecabf-87aa-4eda-93a5-eee5c61d3b91\") " pod="openshift-image-registry/image-registry-697d97f7c8-t6vlm" Oct 03 13:33:03 crc kubenswrapper[4861]: E1003 13:33:03.564735 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 13:33:04.064726969 +0000 UTC m=+98.062712016 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-t6vlm" (UID: "2ddecabf-87aa-4eda-93a5-eee5c61d3b91") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:33:03 crc kubenswrapper[4861]: I1003 13:33:03.665284 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 13:33:03 crc kubenswrapper[4861]: E1003 13:33:03.665574 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 13:33:04.165547486 +0000 UTC m=+98.163532533 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:33:03 crc kubenswrapper[4861]: I1003 13:33:03.665835 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-t6vlm\" (UID: \"2ddecabf-87aa-4eda-93a5-eee5c61d3b91\") " pod="openshift-image-registry/image-registry-697d97f7c8-t6vlm" Oct 03 13:33:03 crc kubenswrapper[4861]: E1003 13:33:03.666238 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 13:33:04.166207943 +0000 UTC m=+98.164192990 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-t6vlm" (UID: "2ddecabf-87aa-4eda-93a5-eee5c61d3b91") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:33:03 crc kubenswrapper[4861]: I1003 13:33:03.666981 4861 patch_prober.go:28] interesting pod/openshift-config-operator-7777fb866f-4znl7 container/openshift-config-operator namespace/openshift-config-operator: Liveness probe status=failure output="Get \"https://10.217.0.20:8443/healthz\": dial tcp 10.217.0.20:8443: connect: connection refused" start-of-body= Oct 03 13:33:03 crc kubenswrapper[4861]: I1003 13:33:03.667028 4861 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-config-operator/openshift-config-operator-7777fb866f-4znl7" podUID="f279daa8-7a4e-405d-b7af-499a2179001f" containerName="openshift-config-operator" probeResult="failure" output="Get \"https://10.217.0.20:8443/healthz\": dial tcp 10.217.0.20:8443: connect: connection refused" Oct 03 13:33:03 crc kubenswrapper[4861]: I1003 13:33:03.666990 4861 patch_prober.go:28] interesting pod/openshift-config-operator-7777fb866f-4znl7 container/openshift-config-operator namespace/openshift-config-operator: Readiness probe status=failure output="Get \"https://10.217.0.20:8443/healthz\": dial tcp 10.217.0.20:8443: connect: connection refused" start-of-body= Oct 03 13:33:03 crc kubenswrapper[4861]: I1003 13:33:03.667327 4861 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-config-operator/openshift-config-operator-7777fb866f-4znl7" podUID="f279daa8-7a4e-405d-b7af-499a2179001f" containerName="openshift-config-operator" probeResult="failure" output="Get \"https://10.217.0.20:8443/healthz\": dial tcp 10.217.0.20:8443: connect: connection refused" Oct 03 13:33:03 crc kubenswrapper[4861]: I1003 13:33:03.767152 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 13:33:03 crc kubenswrapper[4861]: E1003 13:33:03.767446 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 13:33:04.267429151 +0000 UTC m=+98.265414198 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:33:03 crc kubenswrapper[4861]: I1003 13:33:03.868184 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-t6vlm\" (UID: \"2ddecabf-87aa-4eda-93a5-eee5c61d3b91\") " pod="openshift-image-registry/image-registry-697d97f7c8-t6vlm" Oct 03 13:33:03 crc kubenswrapper[4861]: E1003 13:33:03.868538 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 13:33:04.368526845 +0000 UTC m=+98.366511892 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-t6vlm" (UID: "2ddecabf-87aa-4eda-93a5-eee5c61d3b91") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:33:03 crc kubenswrapper[4861]: I1003 13:33:03.969562 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 13:33:03 crc kubenswrapper[4861]: E1003 13:33:03.969915 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 13:33:04.469900026 +0000 UTC m=+98.467885073 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:33:03 crc kubenswrapper[4861]: I1003 13:33:03.994869 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Oct 03 13:33:03 crc kubenswrapper[4861]: I1003 13:33:03.995576 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 03 13:33:04 crc kubenswrapper[4861]: I1003 13:33:04.000490 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt" Oct 03 13:33:04 crc kubenswrapper[4861]: I1003 13:33:04.027089 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n" Oct 03 13:33:04 crc kubenswrapper[4861]: I1003 13:33:04.029412 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Oct 03 13:33:04 crc kubenswrapper[4861]: I1003 13:33:04.071160 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/e5c7b582-9486-4cea-af64-01d70c01ad7c-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"e5c7b582-9486-4cea-af64-01d70c01ad7c\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 03 13:33:04 crc kubenswrapper[4861]: I1003 13:33:04.071410 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-t6vlm\" (UID: \"2ddecabf-87aa-4eda-93a5-eee5c61d3b91\") " pod="openshift-image-registry/image-registry-697d97f7c8-t6vlm" Oct 03 13:33:04 crc kubenswrapper[4861]: I1003 13:33:04.071521 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e5c7b582-9486-4cea-af64-01d70c01ad7c-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"e5c7b582-9486-4cea-af64-01d70c01ad7c\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 03 13:33:04 crc kubenswrapper[4861]: E1003 13:33:04.071797 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 13:33:04.57178248 +0000 UTC m=+98.569767707 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-t6vlm" (UID: "2ddecabf-87aa-4eda-93a5-eee5c61d3b91") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:33:04 crc kubenswrapper[4861]: I1003 13:33:04.171652 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-lzfdk" event={"ID":"5aaec6a4-8da9-4ac8-a792-131254ad8e23","Type":"ContainerStarted","Data":"dafd070d8ec7cc68f4e37e0534a1f83a36f0c49f0c8397bd6a9a371f1cfeaab0"} Oct 03 13:33:04 crc kubenswrapper[4861]: I1003 13:33:04.171771 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-lzfdk" Oct 03 13:33:04 crc kubenswrapper[4861]: I1003 13:33:04.172168 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 13:33:04 crc kubenswrapper[4861]: E1003 13:33:04.172478 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 13:33:04.672457244 +0000 UTC m=+98.670442291 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:33:04 crc kubenswrapper[4861]: I1003 13:33:04.172559 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e5c7b582-9486-4cea-af64-01d70c01ad7c-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"e5c7b582-9486-4cea-af64-01d70c01ad7c\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 03 13:33:04 crc kubenswrapper[4861]: I1003 13:33:04.172709 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/e5c7b582-9486-4cea-af64-01d70c01ad7c-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"e5c7b582-9486-4cea-af64-01d70c01ad7c\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 03 13:33:04 crc kubenswrapper[4861]: I1003 13:33:04.172814 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-t6vlm\" (UID: \"2ddecabf-87aa-4eda-93a5-eee5c61d3b91\") " pod="openshift-image-registry/image-registry-697d97f7c8-t6vlm" Oct 03 13:33:04 crc kubenswrapper[4861]: I1003 13:33:04.172925 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/e5c7b582-9486-4cea-af64-01d70c01ad7c-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"e5c7b582-9486-4cea-af64-01d70c01ad7c\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 03 13:33:04 crc kubenswrapper[4861]: I1003 13:33:04.173656 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-s84lp" event={"ID":"b430fcf9-3c59-4aa8-b554-9e85cf2fcb40","Type":"ContainerStarted","Data":"359bd2d36a2af776aeaaea133aee5289b6e51deb750e60ad25b5fd14355759b2"} Oct 03 13:33:04 crc kubenswrapper[4861]: E1003 13:33:04.173722 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 13:33:04.673708996 +0000 UTC m=+98.671694043 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-t6vlm" (UID: "2ddecabf-87aa-4eda-93a5-eee5c61d3b91") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:33:04 crc kubenswrapper[4861]: I1003 13:33:04.176161 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-t47dt" event={"ID":"217fa116-2a2c-4c70-a13c-370fd7c2ffd7","Type":"ContainerStarted","Data":"9ac5c7f12b98d011c63727ce92d3058b61eee6c09a097bafd223e2e339343cfc"} Oct 03 13:33:04 crc kubenswrapper[4861]: I1003 13:33:04.177971 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-k46fg" event={"ID":"5b53b21d-af9a-4c3a-9b32-30cc1de6cea1","Type":"ContainerStarted","Data":"cdab77452bf2a6edb5d0e6f342c76b7b8a7eb1212c3ee6615f2679744bb59550"} Oct 03 13:33:04 crc kubenswrapper[4861]: I1003 13:33:04.180525 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-6gqgm" event={"ID":"16af6fa6-f789-48cd-8ec1-b84f169fe3a9","Type":"ContainerStarted","Data":"7a1bb9365c331a2ecd47cb7134aece9b447972c6832e1cc8241d318094c2ee3b"} Oct 03 13:33:04 crc kubenswrapper[4861]: I1003 13:33:04.180557 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-6gqgm" Oct 03 13:33:04 crc kubenswrapper[4861]: I1003 13:33:04.184070 4861 patch_prober.go:28] interesting pod/downloads-7954f5f757-rc9jz container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.30:8080/\": dial tcp 10.217.0.30:8080: connect: connection refused" start-of-body= Oct 03 13:33:04 crc kubenswrapper[4861]: I1003 13:33:04.184095 4861 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-xt2zh container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.21:8443/healthz\": dial tcp 10.217.0.21:8443: connect: connection refused" start-of-body= Oct 03 13:33:04 crc kubenswrapper[4861]: I1003 13:33:04.184117 4861 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-xt2zh" podUID="e98a1486-4c73-4895-923a-dc3b4dcead56" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.21:8443/healthz\": dial tcp 10.217.0.21:8443: connect: connection refused" Oct 03 13:33:04 crc kubenswrapper[4861]: I1003 13:33:04.184112 4861 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-rc9jz" podUID="b35950ee-9000-4269-a58d-f1d2c4563f05" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.30:8080/\": dial tcp 10.217.0.30:8080: connect: connection refused" Oct 03 13:33:04 crc kubenswrapper[4861]: I1003 13:33:04.185042 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-bj6mh" Oct 03 13:33:04 crc kubenswrapper[4861]: I1003 13:33:04.185584 4861 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-xv8s4 container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get 
\"https://10.217.0.22:6443/healthz\": dial tcp 10.217.0.22:6443: connect: connection refused" start-of-body= Oct 03 13:33:04 crc kubenswrapper[4861]: I1003 13:33:04.185634 4861 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-xv8s4" podUID="fdb216bf-a9cf-4f81-95ee-0424fc4bdc89" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.22:6443/healthz\": dial tcp 10.217.0.22:6443: connect: connection refused" Oct 03 13:33:04 crc kubenswrapper[4861]: I1003 13:33:04.185669 4861 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-nzdn9 container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.38:5443/healthz\": dial tcp 10.217.0.38:5443: connect: connection refused" start-of-body= Oct 03 13:33:04 crc kubenswrapper[4861]: I1003 13:33:04.185691 4861 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-nzdn9" podUID="5e6df402-f20e-4982-810e-5664640bb0ea" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.38:5443/healthz\": dial tcp 10.217.0.38:5443: connect: connection refused" Oct 03 13:33:04 crc kubenswrapper[4861]: I1003 13:33:04.186924 4861 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-bj6mh container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.34:8080/healthz\": dial tcp 10.217.0.34:8080: connect: connection refused" start-of-body= Oct 03 13:33:04 crc kubenswrapper[4861]: I1003 13:33:04.186958 4861 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-bj6mh" podUID="f23bb6a0-acb7-4ada-96a2-73a978d75125" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.34:8080/healthz\": dial tcp 10.217.0.34:8080: connect: connection refused" Oct 03 13:33:04 crc kubenswrapper[4861]: I1003 13:33:04.205811 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e5c7b582-9486-4cea-af64-01d70c01ad7c-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"e5c7b582-9486-4cea-af64-01d70c01ad7c\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 03 13:33:04 crc kubenswrapper[4861]: I1003 13:33:04.231044 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-lzfdk" podStartSLOduration=76.231026535 podStartE2EDuration="1m16.231026535s" podCreationTimestamp="2025-10-03 13:31:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:33:04.228702986 +0000 UTC m=+98.226688033" watchObservedRunningTime="2025-10-03 13:33:04.231026535 +0000 UTC m=+98.229011582" Oct 03 13:33:04 crc kubenswrapper[4861]: I1003 13:33:04.256192 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-t47dt" podStartSLOduration=76.256176226 podStartE2EDuration="1m16.256176226s" podCreationTimestamp="2025-10-03 13:31:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:33:04.252183454 +0000 UTC m=+98.250168501" 
watchObservedRunningTime="2025-10-03 13:33:04.256176226 +0000 UTC m=+98.254161273" Oct 03 13:33:04 crc kubenswrapper[4861]: I1003 13:33:04.261722 4861 patch_prober.go:28] interesting pod/router-default-5444994796-46jp8 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 03 13:33:04 crc kubenswrapper[4861]: [-]has-synced failed: reason withheld Oct 03 13:33:04 crc kubenswrapper[4861]: [+]process-running ok Oct 03 13:33:04 crc kubenswrapper[4861]: healthz check failed Oct 03 13:33:04 crc kubenswrapper[4861]: I1003 13:33:04.261791 4861 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-46jp8" podUID="dd1fcc3e-9164-4f36-8082-8458a06f6ce9" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 03 13:33:04 crc kubenswrapper[4861]: I1003 13:33:04.273994 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 13:33:04 crc kubenswrapper[4861]: E1003 13:33:04.275706 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 13:33:04.775656672 +0000 UTC m=+98.773641719 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:33:04 crc kubenswrapper[4861]: I1003 13:33:04.315169 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 03 13:33:04 crc kubenswrapper[4861]: I1003 13:33:04.375912 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-t6vlm\" (UID: \"2ddecabf-87aa-4eda-93a5-eee5c61d3b91\") " pod="openshift-image-registry/image-registry-697d97f7c8-t6vlm" Oct 03 13:33:04 crc kubenswrapper[4861]: E1003 13:33:04.376265 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 13:33:04.876222423 +0000 UTC m=+98.874207470 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-t6vlm" (UID: "2ddecabf-87aa-4eda-93a5-eee5c61d3b91") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:33:04 crc kubenswrapper[4861]: I1003 13:33:04.405269 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-jkg6x" podStartSLOduration=76.405244551 podStartE2EDuration="1m16.405244551s" podCreationTimestamp="2025-10-03 13:31:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:33:04.321741425 +0000 UTC m=+98.319726472" watchObservedRunningTime="2025-10-03 13:33:04.405244551 +0000 UTC m=+98.403229598" Oct 03 13:33:04 crc kubenswrapper[4861]: I1003 13:33:04.407606 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-zjlzh" podStartSLOduration=76.407596991 podStartE2EDuration="1m16.407596991s" podCreationTimestamp="2025-10-03 13:31:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:33:04.397584006 +0000 UTC m=+98.395569053" watchObservedRunningTime="2025-10-03 13:33:04.407596991 +0000 UTC m=+98.405582038" Oct 03 13:33:04 crc kubenswrapper[4861]: I1003 13:33:04.479353 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 13:33:04 crc kubenswrapper[4861]: E1003 13:33:04.479765 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 13:33:04.979749638 +0000 UTC m=+98.977734685 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:33:04 crc kubenswrapper[4861]: I1003 13:33:04.495088 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-bj6mh" podStartSLOduration=76.495071798 podStartE2EDuration="1m16.495071798s" podCreationTimestamp="2025-10-03 13:31:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:33:04.484503079 +0000 UTC m=+98.482488126" watchObservedRunningTime="2025-10-03 13:33:04.495071798 +0000 UTC m=+98.493056835" Oct 03 13:33:04 crc kubenswrapper[4861]: I1003 13:33:04.581438 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-t6vlm\" (UID: \"2ddecabf-87aa-4eda-93a5-eee5c61d3b91\") " pod="openshift-image-registry/image-registry-697d97f7c8-t6vlm" Oct 03 13:33:04 crc kubenswrapper[4861]: E1003 13:33:04.581902 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 13:33:05.081885899 +0000 UTC m=+99.079870946 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-t6vlm" (UID: "2ddecabf-87aa-4eda-93a5-eee5c61d3b91") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:33:04 crc kubenswrapper[4861]: I1003 13:33:04.638661 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29324970-q4nf6" podStartSLOduration=76.638632264 podStartE2EDuration="1m16.638632264s" podCreationTimestamp="2025-10-03 13:31:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:33:04.637953766 +0000 UTC m=+98.635938823" watchObservedRunningTime="2025-10-03 13:33:04.638632264 +0000 UTC m=+98.636617311" Oct 03 13:33:04 crc kubenswrapper[4861]: I1003 13:33:04.638779 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-s84lp" podStartSLOduration=76.638774418 podStartE2EDuration="1m16.638774418s" podCreationTimestamp="2025-10-03 13:31:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:33:04.580012701 +0000 UTC m=+98.577997748" watchObservedRunningTime="2025-10-03 13:33:04.638774418 +0000 UTC m=+98.636759465" Oct 03 13:33:04 crc kubenswrapper[4861]: I1003 13:33:04.686793 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 13:33:04 crc kubenswrapper[4861]: E1003 13:33:04.687168 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 13:33:05.18715233 +0000 UTC m=+99.185137377 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:33:04 crc kubenswrapper[4861]: I1003 13:33:04.704791 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-rkmmm" podStartSLOduration=76.704656785 podStartE2EDuration="1m16.704656785s" podCreationTimestamp="2025-10-03 13:31:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:33:04.684862981 +0000 UTC m=+98.682848028" watchObservedRunningTime="2025-10-03 13:33:04.704656785 +0000 UTC m=+98.702641842" Oct 03 13:33:04 crc kubenswrapper[4861]: I1003 13:33:04.788508 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-t6vlm\" (UID: \"2ddecabf-87aa-4eda-93a5-eee5c61d3b91\") " pod="openshift-image-registry/image-registry-697d97f7c8-t6vlm" Oct 03 13:33:04 crc kubenswrapper[4861]: E1003 13:33:04.788905 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 13:33:05.28888567 +0000 UTC m=+99.286870717 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-t6vlm" (UID: "2ddecabf-87aa-4eda-93a5-eee5c61d3b91") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:33:04 crc kubenswrapper[4861]: I1003 13:33:04.802017 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-6gqgm" podStartSLOduration=9.801995144 podStartE2EDuration="9.801995144s" podCreationTimestamp="2025-10-03 13:32:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:33:04.752600056 +0000 UTC m=+98.750585123" watchObservedRunningTime="2025-10-03 13:33:04.801995144 +0000 UTC m=+98.799980191" Oct 03 13:33:04 crc kubenswrapper[4861]: I1003 13:33:04.890577 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 13:33:04 crc kubenswrapper[4861]: E1003 13:33:04.890784 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-10-03 13:33:05.390756304 +0000 UTC m=+99.388741351 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:33:04 crc kubenswrapper[4861]: I1003 13:33:04.891126 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-t6vlm\" (UID: \"2ddecabf-87aa-4eda-93a5-eee5c61d3b91\") " pod="openshift-image-registry/image-registry-697d97f7c8-t6vlm" Oct 03 13:33:04 crc kubenswrapper[4861]: E1003 13:33:04.891901 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 13:33:05.391881813 +0000 UTC m=+99.389866860 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-t6vlm" (UID: "2ddecabf-87aa-4eda-93a5-eee5c61d3b91") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:33:04 crc kubenswrapper[4861]: I1003 13:33:04.897864 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-k46fg" podStartSLOduration=76.897842224 podStartE2EDuration="1m16.897842224s" podCreationTimestamp="2025-10-03 13:31:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:33:04.802538388 +0000 UTC m=+98.800523445" watchObservedRunningTime="2025-10-03 13:33:04.897842224 +0000 UTC m=+98.895827281" Oct 03 13:33:04 crc kubenswrapper[4861]: I1003 13:33:04.942149 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-zpskq" podStartSLOduration=76.942115242 podStartE2EDuration="1m16.942115242s" podCreationTimestamp="2025-10-03 13:31:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:33:04.90315016 +0000 UTC m=+98.901135207" watchObservedRunningTime="2025-10-03 13:33:04.942115242 +0000 UTC m=+98.940100299" Oct 03 13:33:04 crc kubenswrapper[4861]: I1003 13:33:04.945570 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Oct 03 13:33:04 crc kubenswrapper[4861]: I1003 13:33:04.980812 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8kth8" podStartSLOduration=76.980792896 podStartE2EDuration="1m16.980792896s" podCreationTimestamp="2025-10-03 13:31:48 +0000 UTC" firstStartedPulling="0001-01-01 
00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:33:04.979925665 +0000 UTC m=+98.977910712" watchObservedRunningTime="2025-10-03 13:33:04.980792896 +0000 UTC m=+98.978777943" Oct 03 13:33:04 crc kubenswrapper[4861]: I1003 13:33:04.996762 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 13:33:04 crc kubenswrapper[4861]: E1003 13:33:04.997305 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 13:33:05.497253766 +0000 UTC m=+99.495238813 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:33:05 crc kubenswrapper[4861]: I1003 13:33:05.098806 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-t6vlm\" (UID: \"2ddecabf-87aa-4eda-93a5-eee5c61d3b91\") " pod="openshift-image-registry/image-registry-697d97f7c8-t6vlm" Oct 03 13:33:05 crc kubenswrapper[4861]: E1003 13:33:05.099416 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 13:33:05.599392316 +0000 UTC m=+99.597377363 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-t6vlm" (UID: "2ddecabf-87aa-4eda-93a5-eee5c61d3b91") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:33:05 crc kubenswrapper[4861]: I1003 13:33:05.189684 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"e5c7b582-9486-4cea-af64-01d70c01ad7c","Type":"ContainerStarted","Data":"1b45864a6d21b682b44b52ffffe92425a2916df423276798069a1f41fb1f09a4"} Oct 03 13:33:05 crc kubenswrapper[4861]: I1003 13:33:05.198740 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-kgpdn" event={"ID":"88db1fac-4c48-400a-9eee-f5c11d8dd12e","Type":"ContainerStarted","Data":"7ef80d413d3b12db7437fda26713d653967b3662c361e31c7e39fac3abbae43f"} Oct 03 13:33:05 crc kubenswrapper[4861]: I1003 13:33:05.199205 4861 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-bj6mh container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.34:8080/healthz\": dial tcp 10.217.0.34:8080: connect: connection refused" start-of-body= Oct 03 13:33:05 crc kubenswrapper[4861]: I1003 13:33:05.199214 4861 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-xt2zh container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.21:8443/healthz\": dial tcp 10.217.0.21:8443: connect: connection refused" start-of-body= Oct 03 13:33:05 crc kubenswrapper[4861]: I1003 13:33:05.199256 4861 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-xt2zh" podUID="e98a1486-4c73-4895-923a-dc3b4dcead56" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.21:8443/healthz\": dial tcp 10.217.0.21:8443: connect: connection refused" Oct 03 13:33:05 crc kubenswrapper[4861]: I1003 13:33:05.199256 4861 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-bj6mh" podUID="f23bb6a0-acb7-4ada-96a2-73a978d75125" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.34:8080/healthz\": dial tcp 10.217.0.34:8080: connect: connection refused" Oct 03 13:33:05 crc kubenswrapper[4861]: I1003 13:33:05.199646 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 13:33:05 crc kubenswrapper[4861]: E1003 13:33:05.199943 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 13:33:05.699926926 +0000 UTC m=+99.697911973 (durationBeforeRetry 500ms). 
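[note: every mount and unmount failure in this window shares one root cause: the kubelet has no registered CSI plugin named kubevirt.io.hostpath-provisioner yet. The PLEG record above shows the csi-hostpathplugin-kgpdn pod only just starting; once its registrar announces the driver to the kubelet, these operations can succeed. A toy Go sketch of the registry lookup that produces this class of error; the registry shape and the function name newCsiDriverClient here are illustrative, not kubelet's actual code:]

package main

import (
	"fmt"
	"sync"
)

// csiDrivers stands in for the kubelet's table of CSI plugins that have
// completed node registration through their registrar sidecar.
var csiDrivers = struct {
	sync.RWMutex
	m map[string]struct{}
}{m: map[string]struct{}{}}

// newCsiDriverClient fails until the named driver has registered.
func newCsiDriverClient(driverName string) error {
	csiDrivers.RLock()
	defer csiDrivers.RUnlock()
	if _, ok := csiDrivers.m[driverName]; !ok {
		return fmt.Errorf("driver name %s not found in the list of registered CSI drivers", driverName)
	}
	return nil // a real client would talk to the driver's node socket
}

func main() {
	// Before registration completes, every attempt fails:
	fmt.Println(newCsiDriverClient("kubevirt.io.hostpath-provisioner"))
}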
[13:33:05.200039: the paired MountVolume.MountDevice attempt fails identically; deferred to 13:33:05.700342187]
Oct 03 13:33:05 crc kubenswrapper[4861]: I1003 13:33:05.262511 4861 patch_prober.go:28] interesting pod/router-default-5444994796-46jp8 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Oct 03 13:33:05 crc kubenswrapper[4861]: [-]has-synced failed: reason withheld
Oct 03 13:33:05 crc kubenswrapper[4861]: [+]process-running ok
Oct 03 13:33:05 crc kubenswrapper[4861]: healthz check failed
Oct 03 13:33:05 crc kubenswrapper[4861]: I1003 13:33:05.262564 4861 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-46jp8" podUID="dd1fcc3e-9164-4f36-8082-8458a06f6ce9" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
[13:33:05.301067: another UnmountVolume.TearDown retry fails; no retries permitted until 13:33:05.801238106]
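[note: the router records above show a startup probe failing on HTTP 500 while the probe output lists the individual healthz sub-checks ([-]backend-http, [-]has-synced, [+]process-running). A small Go sketch of a probe with the same pass/fail rule, status codes in [200, 400) pass and anything else fails, that also captures the start of the response body, as the "start-of-body=" field does; probeOnce is a hypothetical helper, not the kubelet prober:]

package main

import (
	"fmt"
	"io"
	"net/http"
	"time"
)

// probeOnce issues one HTTP GET and applies the kubelet-style verdict:
// 2xx/3xx passes, everything else (e.g. 500) fails.
func probeOnce(url string) error {
	client := &http.Client{Timeout: time.Second}
	resp, err := client.Get(url)
	if err != nil {
		// e.g. "dial tcp 10.217.0.34:8080: connect: connection refused"
		return fmt.Errorf("Get %q: %v", url, err)
	}
	defer resp.Body.Close()
	body, _ := io.ReadAll(io.LimitReader(resp.Body, 256)) // the "start-of-body"
	if resp.StatusCode < 200 || resp.StatusCode >= 400 {
		return fmt.Errorf("HTTP probe failed with statuscode: %d, start-of-body: %s", resp.StatusCode, body)
	}
	return nil
}

func main() {
	fmt.Println(probeOnce("http://10.217.0.34:8080/healthz"))
}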
[13:33:05.301439 to 13:33:06.215721: the TearDown/MountDevice retry cycle for pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 repeats roughly every 100 ms (attempts at 13:33:05.301, .402, .504, .606, .707, .809, .909 and 13:33:06.012, .114, .215), every attempt failing with "driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers" and each rescheduled with durationBeforeRetry 500ms; identical records collapsed]
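[note: the nestedpendingoperations records above show the retry policy at work: a failed volume operation is parked with a "No retries permitted until now+500ms" deadline, and the reconciler's frequent passes (roughly every 100 ms here) are rejected until that deadline expires. A minimal Go sketch of such a not-before gate, assuming the fixed 500 ms deferral seen in this log; kubelet's real backoff grows on repeated failures, and retryGate is our name, not kubelet's:]

package main

import (
	"fmt"
	"time"
)

// retryGate parks a failing operation until a deadline passes.
type retryGate struct {
	notBefore time.Time
	delay     time.Duration
}

// try rejects early re-attempts and re-arms the gate when the operation fails.
func (g *retryGate) try(op func() error) error {
	if time.Now().Before(g.notBefore) {
		return fmt.Errorf("no retries permitted until %s (durationBeforeRetry %s)",
			g.notBefore.Format(time.RFC3339Nano), g.delay)
	}
	err := op()
	if err != nil {
		g.notBefore = time.Now().Add(g.delay)
	}
	return err
}

func main() {
	g := &retryGate{delay: 500 * time.Millisecond}
	failing := func() error { return fmt.Errorf("driver not registered") }
	fmt.Println(g.try(failing)) // fails and arms the gate
	fmt.Println(g.try(failing)) // rejected until the 500 ms deadline passes
}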
Oct 03 13:33:06 crc kubenswrapper[4861]: I1003 13:33:06.222501 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"e5c7b582-9486-4cea-af64-01d70c01ad7c","Type":"ContainerStarted","Data":"f0251d395657745570232f340655dfb8dbb79d76dfdf236cc580a4f5b446b698"}
Oct 03 13:33:06 crc kubenswrapper[4861]: I1003 13:33:06.271749 4861 patch_prober.go:28] interesting pod/router-default-5444994796-46jp8 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Oct 03 13:33:06 crc kubenswrapper[4861]: [-]has-synced failed: reason withheld
Oct 03 13:33:06 crc kubenswrapper[4861]: [+]process-running ok
Oct 03 13:33:06 crc kubenswrapper[4861]: healthz check failed
Oct 03 13:33:06 crc kubenswrapper[4861]: I1003 13:33:06.271809 4861 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-46jp8" podUID="dd1fcc3e-9164-4f36-8082-8458a06f6ce9" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
[13:33:06.317478 to 13:33:06.520071: five more MountDevice/TearDown retry cycles for pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 fail with the same "driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers" error, each deferred 500 ms; identical records collapsed]
[13:33:06.620756: the UnmountVolume.TearDown retry fails again; deferred to 13:33:07.121481313]
Oct 03 13:33:06 crc kubenswrapper[4861]: I1003 13:33:06.621034 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/267128bb-f8b8-4d69-99a3-ba3af795218c-metrics-certs\") pod \"network-metrics-daemon-cft42\" (UID: \"267128bb-f8b8-4d69-99a3-ba3af795218c\") " pod="openshift-multus/network-metrics-daemon-cft42"
Oct 03 13:33:06 crc kubenswrapper[4861]: I1003 13:33:06.646836 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/267128bb-f8b8-4d69-99a3-ba3af795218c-metrics-certs\") pod \"network-metrics-daemon-cft42\" (UID: \"267128bb-f8b8-4d69-99a3-ba3af795218c\") " pod="openshift-multus/network-metrics-daemon-cft42"
Oct 03 13:33:06 crc kubenswrapper[4861]: I1003 13:33:06.672728 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-4znl7"
[13:33:06.722892: another MountVolume.MountDevice attempt for the image-registry PVC fails identically; deferred to 13:33:07.224018004]
Oct 03 13:33:06 crc kubenswrapper[4861]: I1003 13:33:06.769323 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/revision-pruner-9-crc" podStartSLOduration=3.7693056780000003 podStartE2EDuration="3.769305678s" podCreationTimestamp="2025-10-03 13:33:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:33:06.268040334 +0000 UTC m=+100.266025381" watchObservedRunningTime="2025-10-03 13:33:06.769305678 +0000 UTC m=+100.767290715"
Oct 03 13:33:06 crc kubenswrapper[4861]: I1003 13:33:06.799562 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cft42"
[13:33:06.824707 to 13:33:06.825221: another TearDown/MountDevice retry pair fails identically; both deferred 500 ms]
[13:33:06.925934 to 13:33:07.235006: three more TearDown/MountDevice retry cycles fail identically, each deferred 500 ms; identical records collapsed]
Oct 03 13:33:07 crc kubenswrapper[4861]: I1003 13:33:07.249734 4861 generic.go:334] "Generic (PLEG): container finished" podID="e5c7b582-9486-4cea-af64-01d70c01ad7c" containerID="f0251d395657745570232f340655dfb8dbb79d76dfdf236cc580a4f5b446b698" exitCode=0
Oct 03 13:33:07 crc kubenswrapper[4861]: I1003 13:33:07.249781 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"e5c7b582-9486-4cea-af64-01d70c01ad7c","Type":"ContainerDied","Data":"f0251d395657745570232f340655dfb8dbb79d76dfdf236cc580a4f5b446b698"}
Oct 03 13:33:07 crc kubenswrapper[4861]: I1003 13:33:07.282039 4861 patch_prober.go:28] interesting pod/router-default-5444994796-46jp8 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Oct 03 13:33:07 crc kubenswrapper[4861]: [-]has-synced failed: reason withheld
Oct 03 13:33:07 crc kubenswrapper[4861]: [+]process-running ok
Oct 03 13:33:07 crc kubenswrapper[4861]: healthz check failed
Oct 03 13:33:07 crc kubenswrapper[4861]: I1003 13:33:07.282098 4861 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-46jp8" podUID="dd1fcc3e-9164-4f36-8082-8458a06f6ce9" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Oct 03 13:33:07 crc kubenswrapper[4861]: I1003 13:33:07.335718 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-t6vlm\" (UID: \"2ddecabf-87aa-4eda-93a5-eee5c61d3b91\") " pod="openshift-image-registry/image-registry-697d97f7c8-t6vlm"
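[note: the generic.go / kubelet.go pair above is the pod lifecycle event generator (PLEG) translating a runtime observation, container f0251d39... exited with code 0, into a ContainerDied event for the sync loop; the revision-pruner container simply ran to completion. An illustrative Go reduction of that mapping; the plegEvent type and onContainerFinished function are ours, not kubelet's:]

package main

import "fmt"

// plegEvent mirrors the {ID, Type, Data} triple printed in the log.
type plegEvent struct {
	ID   string // pod UID
	Type string // ContainerStarted, ContainerDied, ...
	Data string // container ID
}

// onContainerFinished maps a container exit to a PLEG event. Exit code 0 is
// still "died"; success vs. failure is judged later against restartPolicy.
func onContainerFinished(podUID, containerID string, exitCode int) plegEvent {
	_ = exitCode
	return plegEvent{ID: podUID, Type: "ContainerDied", Data: containerID}
}

func main() {
	fmt.Printf("%+v\n", onContainerFinished(
		"e5c7b582-9486-4cea-af64-01d70c01ad7c",
		"f0251d395657745570232f340655dfb8dbb79d76dfdf236cc580a4f5b446b698",
		0,
	))
}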
pod="openshift-image-registry/image-registry-697d97f7c8-t6vlm" Oct 03 13:33:07 crc kubenswrapper[4861]: E1003 13:33:07.336089 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 13:33:07.83607766 +0000 UTC m=+101.834062707 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-t6vlm" (UID: "2ddecabf-87aa-4eda-93a5-eee5c61d3b91") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:33:07 crc kubenswrapper[4861]: I1003 13:33:07.399752 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-cft42"] Oct 03 13:33:07 crc kubenswrapper[4861]: I1003 13:33:07.437118 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 13:33:07 crc kubenswrapper[4861]: E1003 13:33:07.437518 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 13:33:07.937501182 +0000 UTC m=+101.935486229 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:33:07 crc kubenswrapper[4861]: I1003 13:33:07.539210 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-t6vlm\" (UID: \"2ddecabf-87aa-4eda-93a5-eee5c61d3b91\") " pod="openshift-image-registry/image-registry-697d97f7c8-t6vlm" Oct 03 13:33:07 crc kubenswrapper[4861]: E1003 13:33:07.539587 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 13:33:08.039571391 +0000 UTC m=+102.037556438 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-t6vlm" (UID: "2ddecabf-87aa-4eda-93a5-eee5c61d3b91") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:33:07 crc kubenswrapper[4861]: I1003 13:33:07.614966 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-654v6" Oct 03 13:33:07 crc kubenswrapper[4861]: I1003 13:33:07.640411 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 13:33:07 crc kubenswrapper[4861]: E1003 13:33:07.641141 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 13:33:08.141119596 +0000 UTC m=+102.139104643 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:33:07 crc kubenswrapper[4861]: I1003 13:33:07.752033 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-t6vlm\" (UID: \"2ddecabf-87aa-4eda-93a5-eee5c61d3b91\") " pod="openshift-image-registry/image-registry-697d97f7c8-t6vlm" Oct 03 13:33:07 crc kubenswrapper[4861]: E1003 13:33:07.752332 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 13:33:08.252317908 +0000 UTC m=+102.250302955 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-t6vlm" (UID: "2ddecabf-87aa-4eda-93a5-eee5c61d3b91") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:33:07 crc kubenswrapper[4861]: I1003 13:33:07.840726 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-rkmmm" Oct 03 13:33:07 crc kubenswrapper[4861]: I1003 13:33:07.841067 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-rkmmm" Oct 03 13:33:07 crc kubenswrapper[4861]: I1003 13:33:07.852922 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 13:33:07 crc kubenswrapper[4861]: E1003 13:33:07.853417 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 13:33:08.353398362 +0000 UTC m=+102.351383409 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:33:07 crc kubenswrapper[4861]: I1003 13:33:07.866687 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8kth8" Oct 03 13:33:07 crc kubenswrapper[4861]: I1003 13:33:07.866740 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8kth8" Oct 03 13:33:07 crc kubenswrapper[4861]: I1003 13:33:07.876262 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8kth8" Oct 03 13:33:07 crc kubenswrapper[4861]: I1003 13:33:07.903539 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-xv8s4" Oct 03 13:33:07 crc kubenswrapper[4861]: I1003 13:33:07.954299 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-t6vlm\" (UID: \"2ddecabf-87aa-4eda-93a5-eee5c61d3b91\") " pod="openshift-image-registry/image-registry-697d97f7c8-t6vlm" Oct 03 13:33:07 crc kubenswrapper[4861]: E1003 13:33:07.955442 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" 
failed. No retries permitted until 2025-10-03 13:33:08.45542175 +0000 UTC m=+102.453406887 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-t6vlm" (UID: "2ddecabf-87aa-4eda-93a5-eee5c61d3b91") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:33:08 crc kubenswrapper[4861]: I1003 13:33:08.036053 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-69rqg"] Oct 03 13:33:08 crc kubenswrapper[4861]: I1003 13:33:08.037222 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-69rqg" Oct 03 13:33:08 crc kubenswrapper[4861]: I1003 13:33:08.055346 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 13:33:08 crc kubenswrapper[4861]: E1003 13:33:08.055752 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 13:33:08.555733744 +0000 UTC m=+102.553718791 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:33:08 crc kubenswrapper[4861]: I1003 13:33:08.061196 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Oct 03 13:33:08 crc kubenswrapper[4861]: I1003 13:33:08.078203 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rjkcl" Oct 03 13:33:08 crc kubenswrapper[4861]: I1003 13:33:08.079479 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-v6tz9" Oct 03 13:33:08 crc kubenswrapper[4861]: I1003 13:33:08.079531 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-v6tz9" Oct 03 13:33:08 crc kubenswrapper[4861]: I1003 13:33:08.081326 4861 patch_prober.go:28] interesting pod/console-f9d7485db-v6tz9 container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.10:8443/health\": dial tcp 10.217.0.10:8443: connect: connection refused" start-of-body= Oct 03 13:33:08 crc kubenswrapper[4861]: I1003 13:33:08.081366 4861 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-v6tz9" podUID="35c87388-97eb-44ff-91d6-6e9b9cfaa6a1" containerName="console" probeResult="failure" output="Get 
\"https://10.217.0.10:8443/health\": dial tcp 10.217.0.10:8443: connect: connection refused" Oct 03 13:33:08 crc kubenswrapper[4861]: I1003 13:33:08.156808 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-t6vlm\" (UID: \"2ddecabf-87aa-4eda-93a5-eee5c61d3b91\") " pod="openshift-image-registry/image-registry-697d97f7c8-t6vlm" Oct 03 13:33:08 crc kubenswrapper[4861]: I1003 13:33:08.156896 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3656f402-8d9e-4401-83a5-8367adb5b0f0-catalog-content\") pod \"community-operators-69rqg\" (UID: \"3656f402-8d9e-4401-83a5-8367adb5b0f0\") " pod="openshift-marketplace/community-operators-69rqg" Oct 03 13:33:08 crc kubenswrapper[4861]: I1003 13:33:08.156935 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3656f402-8d9e-4401-83a5-8367adb5b0f0-utilities\") pod \"community-operators-69rqg\" (UID: \"3656f402-8d9e-4401-83a5-8367adb5b0f0\") " pod="openshift-marketplace/community-operators-69rqg" Oct 03 13:33:08 crc kubenswrapper[4861]: I1003 13:33:08.156977 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lg4jb\" (UniqueName: \"kubernetes.io/projected/3656f402-8d9e-4401-83a5-8367adb5b0f0-kube-api-access-lg4jb\") pod \"community-operators-69rqg\" (UID: \"3656f402-8d9e-4401-83a5-8367adb5b0f0\") " pod="openshift-marketplace/community-operators-69rqg" Oct 03 13:33:08 crc kubenswrapper[4861]: E1003 13:33:08.158054 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 13:33:08.658041639 +0000 UTC m=+102.656026686 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-t6vlm" (UID: "2ddecabf-87aa-4eda-93a5-eee5c61d3b91") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:33:08 crc kubenswrapper[4861]: I1003 13:33:08.212930 4861 patch_prober.go:28] interesting pod/downloads-7954f5f757-rc9jz container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.30:8080/\": dial tcp 10.217.0.30:8080: connect: connection refused" start-of-body= Oct 03 13:33:08 crc kubenswrapper[4861]: I1003 13:33:08.212968 4861 patch_prober.go:28] interesting pod/downloads-7954f5f757-rc9jz container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.30:8080/\": dial tcp 10.217.0.30:8080: connect: connection refused" start-of-body= Oct 03 13:33:08 crc kubenswrapper[4861]: I1003 13:33:08.212990 4861 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-rc9jz" podUID="b35950ee-9000-4269-a58d-f1d2c4563f05" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.30:8080/\": dial tcp 10.217.0.30:8080: connect: connection refused" Oct 03 13:33:08 crc kubenswrapper[4861]: I1003 13:33:08.213054 4861 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-rc9jz" podUID="b35950ee-9000-4269-a58d-f1d2c4563f05" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.30:8080/\": dial tcp 10.217.0.30:8080: connect: connection refused" Oct 03 13:33:08 crc kubenswrapper[4861]: I1003 13:33:08.238149 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-69rqg"] Oct 03 13:33:08 crc kubenswrapper[4861]: I1003 13:33:08.248843 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-nzdn9" Oct 03 13:33:08 crc kubenswrapper[4861]: I1003 13:33:08.258180 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-46jp8" Oct 03 13:33:08 crc kubenswrapper[4861]: I1003 13:33:08.258577 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 13:33:08 crc kubenswrapper[4861]: I1003 13:33:08.258757 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3656f402-8d9e-4401-83a5-8367adb5b0f0-catalog-content\") pod \"community-operators-69rqg\" (UID: \"3656f402-8d9e-4401-83a5-8367adb5b0f0\") " pod="openshift-marketplace/community-operators-69rqg" Oct 03 13:33:08 crc kubenswrapper[4861]: I1003 13:33:08.258781 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3656f402-8d9e-4401-83a5-8367adb5b0f0-utilities\") pod \"community-operators-69rqg\" (UID: \"3656f402-8d9e-4401-83a5-8367adb5b0f0\") " 
pod="openshift-marketplace/community-operators-69rqg" Oct 03 13:33:08 crc kubenswrapper[4861]: I1003 13:33:08.258815 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lg4jb\" (UniqueName: \"kubernetes.io/projected/3656f402-8d9e-4401-83a5-8367adb5b0f0-kube-api-access-lg4jb\") pod \"community-operators-69rqg\" (UID: \"3656f402-8d9e-4401-83a5-8367adb5b0f0\") " pod="openshift-marketplace/community-operators-69rqg" Oct 03 13:33:08 crc kubenswrapper[4861]: E1003 13:33:08.258881 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 13:33:08.758863237 +0000 UTC m=+102.756848274 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:33:08 crc kubenswrapper[4861]: I1003 13:33:08.259185 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3656f402-8d9e-4401-83a5-8367adb5b0f0-catalog-content\") pod \"community-operators-69rqg\" (UID: \"3656f402-8d9e-4401-83a5-8367adb5b0f0\") " pod="openshift-marketplace/community-operators-69rqg" Oct 03 13:33:08 crc kubenswrapper[4861]: I1003 13:33:08.259347 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3656f402-8d9e-4401-83a5-8367adb5b0f0-utilities\") pod \"community-operators-69rqg\" (UID: \"3656f402-8d9e-4401-83a5-8367adb5b0f0\") " pod="openshift-marketplace/community-operators-69rqg" Oct 03 13:33:08 crc kubenswrapper[4861]: I1003 13:33:08.264539 4861 patch_prober.go:28] interesting pod/router-default-5444994796-46jp8 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 03 13:33:08 crc kubenswrapper[4861]: [-]has-synced failed: reason withheld Oct 03 13:33:08 crc kubenswrapper[4861]: [+]process-running ok Oct 03 13:33:08 crc kubenswrapper[4861]: healthz check failed Oct 03 13:33:08 crc kubenswrapper[4861]: I1003 13:33:08.264601 4861 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-46jp8" podUID="dd1fcc3e-9164-4f36-8082-8458a06f6ce9" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 03 13:33:08 crc kubenswrapper[4861]: I1003 13:33:08.267451 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-cft42" event={"ID":"267128bb-f8b8-4d69-99a3-ba3af795218c","Type":"ContainerStarted","Data":"7f06e18c97ce42fbb62c45d88f3ee43b83ad47e38ee3d1fbffe4b3be4e9f5c43"} Oct 03 13:33:08 crc kubenswrapper[4861]: I1003 13:33:08.267494 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-cft42" event={"ID":"267128bb-f8b8-4d69-99a3-ba3af795218c","Type":"ContainerStarted","Data":"0bb6290c9cbe9f55637cb53e28d9a5db38b35d555f69b4d06ef00f9cd1ea9c91"} Oct 
03 13:33:08 crc kubenswrapper[4861]: I1003 13:33:08.270378 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-kgpdn" event={"ID":"88db1fac-4c48-400a-9eee-f5c11d8dd12e","Type":"ContainerStarted","Data":"9400fbead5f3611d699e2c9ade539b855b71118238912bc66b7fed71eb6dcd2e"} Oct 03 13:33:08 crc kubenswrapper[4861]: I1003 13:33:08.276767 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8kth8" Oct 03 13:33:08 crc kubenswrapper[4861]: I1003 13:33:08.312961 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-mbpzd"] Oct 03 13:33:08 crc kubenswrapper[4861]: I1003 13:33:08.314187 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mbpzd" Oct 03 13:33:08 crc kubenswrapper[4861]: I1003 13:33:08.322748 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Oct 03 13:33:08 crc kubenswrapper[4861]: I1003 13:33:08.329409 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lg4jb\" (UniqueName: \"kubernetes.io/projected/3656f402-8d9e-4401-83a5-8367adb5b0f0-kube-api-access-lg4jb\") pod \"community-operators-69rqg\" (UID: \"3656f402-8d9e-4401-83a5-8367adb5b0f0\") " pod="openshift-marketplace/community-operators-69rqg" Oct 03 13:33:08 crc kubenswrapper[4861]: I1003 13:33:08.362036 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b06b44ef-af9a-4253-8f45-44d98adc49bf-catalog-content\") pod \"certified-operators-mbpzd\" (UID: \"b06b44ef-af9a-4253-8f45-44d98adc49bf\") " pod="openshift-marketplace/certified-operators-mbpzd" Oct 03 13:33:08 crc kubenswrapper[4861]: I1003 13:33:08.362402 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-t6vlm\" (UID: \"2ddecabf-87aa-4eda-93a5-eee5c61d3b91\") " pod="openshift-image-registry/image-registry-697d97f7c8-t6vlm" Oct 03 13:33:08 crc kubenswrapper[4861]: I1003 13:33:08.362470 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b06b44ef-af9a-4253-8f45-44d98adc49bf-utilities\") pod \"certified-operators-mbpzd\" (UID: \"b06b44ef-af9a-4253-8f45-44d98adc49bf\") " pod="openshift-marketplace/certified-operators-mbpzd" Oct 03 13:33:08 crc kubenswrapper[4861]: I1003 13:33:08.362728 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9zbh4\" (UniqueName: \"kubernetes.io/projected/b06b44ef-af9a-4253-8f45-44d98adc49bf-kube-api-access-9zbh4\") pod \"certified-operators-mbpzd\" (UID: \"b06b44ef-af9a-4253-8f45-44d98adc49bf\") " pod="openshift-marketplace/certified-operators-mbpzd" Oct 03 13:33:08 crc kubenswrapper[4861]: E1003 13:33:08.363097 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 13:33:08.86308349 +0000 UTC m=+102.861068537 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-t6vlm" (UID: "2ddecabf-87aa-4eda-93a5-eee5c61d3b91") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:33:08 crc kubenswrapper[4861]: I1003 13:33:08.371497 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-69rqg" Oct 03 13:33:08 crc kubenswrapper[4861]: I1003 13:33:08.421341 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-mbpzd"] Oct 03 13:33:08 crc kubenswrapper[4861]: I1003 13:33:08.443571 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-wmjx9"] Oct 03 13:33:08 crc kubenswrapper[4861]: I1003 13:33:08.444843 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-wmjx9" Oct 03 13:33:08 crc kubenswrapper[4861]: I1003 13:33:08.445979 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-bj6mh" Oct 03 13:33:08 crc kubenswrapper[4861]: I1003 13:33:08.474278 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 13:33:08 crc kubenswrapper[4861]: I1003 13:33:08.474909 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9zbh4\" (UniqueName: \"kubernetes.io/projected/b06b44ef-af9a-4253-8f45-44d98adc49bf-kube-api-access-9zbh4\") pod \"certified-operators-mbpzd\" (UID: \"b06b44ef-af9a-4253-8f45-44d98adc49bf\") " pod="openshift-marketplace/certified-operators-mbpzd" Oct 03 13:33:08 crc kubenswrapper[4861]: I1003 13:33:08.475052 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b06b44ef-af9a-4253-8f45-44d98adc49bf-catalog-content\") pod \"certified-operators-mbpzd\" (UID: \"b06b44ef-af9a-4253-8f45-44d98adc49bf\") " pod="openshift-marketplace/certified-operators-mbpzd" Oct 03 13:33:08 crc kubenswrapper[4861]: I1003 13:33:08.475175 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b06b44ef-af9a-4253-8f45-44d98adc49bf-utilities\") pod \"certified-operators-mbpzd\" (UID: \"b06b44ef-af9a-4253-8f45-44d98adc49bf\") " pod="openshift-marketplace/certified-operators-mbpzd" Oct 03 13:33:08 crc kubenswrapper[4861]: I1003 13:33:08.475709 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b06b44ef-af9a-4253-8f45-44d98adc49bf-utilities\") pod \"certified-operators-mbpzd\" (UID: \"b06b44ef-af9a-4253-8f45-44d98adc49bf\") " pod="openshift-marketplace/certified-operators-mbpzd" Oct 03 13:33:08 crc kubenswrapper[4861]: E1003 13:33:08.475879 4861 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 13:33:08.975860832 +0000 UTC m=+102.973845889 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:33:08 crc kubenswrapper[4861]: I1003 13:33:08.478831 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b06b44ef-af9a-4253-8f45-44d98adc49bf-catalog-content\") pod \"certified-operators-mbpzd\" (UID: \"b06b44ef-af9a-4253-8f45-44d98adc49bf\") " pod="openshift-marketplace/certified-operators-mbpzd" Oct 03 13:33:08 crc kubenswrapper[4861]: I1003 13:33:08.509924 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-wmjx9"] Oct 03 13:33:08 crc kubenswrapper[4861]: I1003 13:33:08.564211 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9zbh4\" (UniqueName: \"kubernetes.io/projected/b06b44ef-af9a-4253-8f45-44d98adc49bf-kube-api-access-9zbh4\") pod \"certified-operators-mbpzd\" (UID: \"b06b44ef-af9a-4253-8f45-44d98adc49bf\") " pod="openshift-marketplace/certified-operators-mbpzd" Oct 03 13:33:08 crc kubenswrapper[4861]: I1003 13:33:08.577075 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g684d\" (UniqueName: \"kubernetes.io/projected/bc19f2ef-da47-4f43-8281-0fa924546c1b-kube-api-access-g684d\") pod \"community-operators-wmjx9\" (UID: \"bc19f2ef-da47-4f43-8281-0fa924546c1b\") " pod="openshift-marketplace/community-operators-wmjx9" Oct 03 13:33:08 crc kubenswrapper[4861]: I1003 13:33:08.577135 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bc19f2ef-da47-4f43-8281-0fa924546c1b-utilities\") pod \"community-operators-wmjx9\" (UID: \"bc19f2ef-da47-4f43-8281-0fa924546c1b\") " pod="openshift-marketplace/community-operators-wmjx9" Oct 03 13:33:08 crc kubenswrapper[4861]: I1003 13:33:08.577154 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bc19f2ef-da47-4f43-8281-0fa924546c1b-catalog-content\") pod \"community-operators-wmjx9\" (UID: \"bc19f2ef-da47-4f43-8281-0fa924546c1b\") " pod="openshift-marketplace/community-operators-wmjx9" Oct 03 13:33:08 crc kubenswrapper[4861]: I1003 13:33:08.577179 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-t6vlm\" (UID: \"2ddecabf-87aa-4eda-93a5-eee5c61d3b91\") " pod="openshift-image-registry/image-registry-697d97f7c8-t6vlm" Oct 03 13:33:08 crc kubenswrapper[4861]: E1003 13:33:08.578604 4861 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 13:33:09.078592388 +0000 UTC m=+103.076577435 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-t6vlm" (UID: "2ddecabf-87aa-4eda-93a5-eee5c61d3b91") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:33:08 crc kubenswrapper[4861]: I1003 13:33:08.594028 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-wqcjq"] Oct 03 13:33:08 crc kubenswrapper[4861]: I1003 13:33:08.594905 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-wqcjq" Oct 03 13:33:08 crc kubenswrapper[4861]: I1003 13:33:08.599727 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-xt2zh" Oct 03 13:33:08 crc kubenswrapper[4861]: I1003 13:33:08.647054 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mbpzd" Oct 03 13:33:08 crc kubenswrapper[4861]: I1003 13:33:08.678819 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 13:33:08 crc kubenswrapper[4861]: I1003 13:33:08.697683 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ee4ad52b-0124-4f26-8780-cd8c7e6657b5-catalog-content\") pod \"certified-operators-wqcjq\" (UID: \"ee4ad52b-0124-4f26-8780-cd8c7e6657b5\") " pod="openshift-marketplace/certified-operators-wqcjq" Oct 03 13:33:08 crc kubenswrapper[4861]: E1003 13:33:08.697787 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 13:33:09.197751582 +0000 UTC m=+103.195736629 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:33:08 crc kubenswrapper[4861]: I1003 13:33:08.697848 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xtp27\" (UniqueName: \"kubernetes.io/projected/ee4ad52b-0124-4f26-8780-cd8c7e6657b5-kube-api-access-xtp27\") pod \"certified-operators-wqcjq\" (UID: \"ee4ad52b-0124-4f26-8780-cd8c7e6657b5\") " pod="openshift-marketplace/certified-operators-wqcjq" Oct 03 13:33:08 crc kubenswrapper[4861]: I1003 13:33:08.698047 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ee4ad52b-0124-4f26-8780-cd8c7e6657b5-utilities\") pod \"certified-operators-wqcjq\" (UID: \"ee4ad52b-0124-4f26-8780-cd8c7e6657b5\") " pod="openshift-marketplace/certified-operators-wqcjq" Oct 03 13:33:08 crc kubenswrapper[4861]: I1003 13:33:08.698080 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g684d\" (UniqueName: \"kubernetes.io/projected/bc19f2ef-da47-4f43-8281-0fa924546c1b-kube-api-access-g684d\") pod \"community-operators-wmjx9\" (UID: \"bc19f2ef-da47-4f43-8281-0fa924546c1b\") " pod="openshift-marketplace/community-operators-wmjx9" Oct 03 13:33:08 crc kubenswrapper[4861]: I1003 13:33:08.698196 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bc19f2ef-da47-4f43-8281-0fa924546c1b-utilities\") pod \"community-operators-wmjx9\" (UID: \"bc19f2ef-da47-4f43-8281-0fa924546c1b\") " pod="openshift-marketplace/community-operators-wmjx9" Oct 03 13:33:08 crc kubenswrapper[4861]: I1003 13:33:08.698287 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bc19f2ef-da47-4f43-8281-0fa924546c1b-catalog-content\") pod \"community-operators-wmjx9\" (UID: \"bc19f2ef-da47-4f43-8281-0fa924546c1b\") " pod="openshift-marketplace/community-operators-wmjx9" Oct 03 13:33:08 crc kubenswrapper[4861]: I1003 13:33:08.698329 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-t6vlm\" (UID: \"2ddecabf-87aa-4eda-93a5-eee5c61d3b91\") " pod="openshift-image-registry/image-registry-697d97f7c8-t6vlm" Oct 03 13:33:08 crc kubenswrapper[4861]: E1003 13:33:08.698778 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 13:33:09.198767518 +0000 UTC m=+103.196752565 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-t6vlm" (UID: "2ddecabf-87aa-4eda-93a5-eee5c61d3b91") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:33:08 crc kubenswrapper[4861]: I1003 13:33:08.714153 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bc19f2ef-da47-4f43-8281-0fa924546c1b-catalog-content\") pod \"community-operators-wmjx9\" (UID: \"bc19f2ef-da47-4f43-8281-0fa924546c1b\") " pod="openshift-marketplace/community-operators-wmjx9" Oct 03 13:33:08 crc kubenswrapper[4861]: I1003 13:33:08.714733 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bc19f2ef-da47-4f43-8281-0fa924546c1b-utilities\") pod \"community-operators-wmjx9\" (UID: \"bc19f2ef-da47-4f43-8281-0fa924546c1b\") " pod="openshift-marketplace/community-operators-wmjx9" Oct 03 13:33:08 crc kubenswrapper[4861]: I1003 13:33:08.813065 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-wqcjq"] Oct 03 13:33:08 crc kubenswrapper[4861]: I1003 13:33:08.814314 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 13:33:08 crc kubenswrapper[4861]: I1003 13:33:08.814565 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ee4ad52b-0124-4f26-8780-cd8c7e6657b5-utilities\") pod \"certified-operators-wqcjq\" (UID: \"ee4ad52b-0124-4f26-8780-cd8c7e6657b5\") " pod="openshift-marketplace/certified-operators-wqcjq" Oct 03 13:33:08 crc kubenswrapper[4861]: I1003 13:33:08.814695 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ee4ad52b-0124-4f26-8780-cd8c7e6657b5-catalog-content\") pod \"certified-operators-wqcjq\" (UID: \"ee4ad52b-0124-4f26-8780-cd8c7e6657b5\") " pod="openshift-marketplace/certified-operators-wqcjq" Oct 03 13:33:08 crc kubenswrapper[4861]: I1003 13:33:08.814770 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xtp27\" (UniqueName: \"kubernetes.io/projected/ee4ad52b-0124-4f26-8780-cd8c7e6657b5-kube-api-access-xtp27\") pod \"certified-operators-wqcjq\" (UID: \"ee4ad52b-0124-4f26-8780-cd8c7e6657b5\") " pod="openshift-marketplace/certified-operators-wqcjq" Oct 03 13:33:08 crc kubenswrapper[4861]: E1003 13:33:08.814900 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 13:33:09.314885405 +0000 UTC m=+103.312870442 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:33:08 crc kubenswrapper[4861]: I1003 13:33:08.815289 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ee4ad52b-0124-4f26-8780-cd8c7e6657b5-utilities\") pod \"certified-operators-wqcjq\" (UID: \"ee4ad52b-0124-4f26-8780-cd8c7e6657b5\") " pod="openshift-marketplace/certified-operators-wqcjq" Oct 03 13:33:08 crc kubenswrapper[4861]: I1003 13:33:08.815569 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ee4ad52b-0124-4f26-8780-cd8c7e6657b5-catalog-content\") pod \"certified-operators-wqcjq\" (UID: \"ee4ad52b-0124-4f26-8780-cd8c7e6657b5\") " pod="openshift-marketplace/certified-operators-wqcjq" Oct 03 13:33:08 crc kubenswrapper[4861]: I1003 13:33:08.831129 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g684d\" (UniqueName: \"kubernetes.io/projected/bc19f2ef-da47-4f43-8281-0fa924546c1b-kube-api-access-g684d\") pod \"community-operators-wmjx9\" (UID: \"bc19f2ef-da47-4f43-8281-0fa924546c1b\") " pod="openshift-marketplace/community-operators-wmjx9" Oct 03 13:33:08 crc kubenswrapper[4861]: I1003 13:33:08.893276 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xtp27\" (UniqueName: \"kubernetes.io/projected/ee4ad52b-0124-4f26-8780-cd8c7e6657b5-kube-api-access-xtp27\") pod \"certified-operators-wqcjq\" (UID: \"ee4ad52b-0124-4f26-8780-cd8c7e6657b5\") " pod="openshift-marketplace/certified-operators-wqcjq" Oct 03 13:33:08 crc kubenswrapper[4861]: I1003 13:33:08.921947 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-t6vlm\" (UID: \"2ddecabf-87aa-4eda-93a5-eee5c61d3b91\") " pod="openshift-image-registry/image-registry-697d97f7c8-t6vlm" Oct 03 13:33:08 crc kubenswrapper[4861]: E1003 13:33:08.922288 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 13:33:09.422274908 +0000 UTC m=+103.420259955 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-t6vlm" (UID: "2ddecabf-87aa-4eda-93a5-eee5c61d3b91") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:33:08 crc kubenswrapper[4861]: I1003 13:33:08.971500 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-wqcjq" Oct 03 13:33:09 crc kubenswrapper[4861]: I1003 13:33:09.022782 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 13:33:09 crc kubenswrapper[4861]: E1003 13:33:09.023162 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 13:33:09.523142877 +0000 UTC m=+103.521127924 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:33:09 crc kubenswrapper[4861]: I1003 13:33:09.100555 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-wmjx9" Oct 03 13:33:09 crc kubenswrapper[4861]: I1003 13:33:09.123862 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-t6vlm\" (UID: \"2ddecabf-87aa-4eda-93a5-eee5c61d3b91\") " pod="openshift-image-registry/image-registry-697d97f7c8-t6vlm" Oct 03 13:33:09 crc kubenswrapper[4861]: E1003 13:33:09.124156 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 13:33:09.62414566 +0000 UTC m=+103.622130707 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-t6vlm" (UID: "2ddecabf-87aa-4eda-93a5-eee5c61d3b91") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:33:09 crc kubenswrapper[4861]: I1003 13:33:09.226676 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 13:33:09 crc kubenswrapper[4861]: E1003 13:33:09.227088 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-10-03 13:33:09.72707442 +0000 UTC m=+103.725059457 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:33:09 crc kubenswrapper[4861]: I1003 13:33:09.273531 4861 patch_prober.go:28] interesting pod/router-default-5444994796-46jp8 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 03 13:33:09 crc kubenswrapper[4861]: [-]has-synced failed: reason withheld Oct 03 13:33:09 crc kubenswrapper[4861]: [+]process-running ok Oct 03 13:33:09 crc kubenswrapper[4861]: healthz check failed Oct 03 13:33:09 crc kubenswrapper[4861]: I1003 13:33:09.273600 4861 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-46jp8" podUID="dd1fcc3e-9164-4f36-8082-8458a06f6ce9" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 03 13:33:09 crc kubenswrapper[4861]: I1003 13:33:09.330309 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-t6vlm\" (UID: \"2ddecabf-87aa-4eda-93a5-eee5c61d3b91\") " pod="openshift-image-registry/image-registry-697d97f7c8-t6vlm" Oct 03 13:33:09 crc kubenswrapper[4861]: E1003 13:33:09.330584 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 13:33:09.830572535 +0000 UTC m=+103.828557572 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-t6vlm" (UID: "2ddecabf-87aa-4eda-93a5-eee5c61d3b91") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:33:09 crc kubenswrapper[4861]: I1003 13:33:09.330630 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-cft42" event={"ID":"267128bb-f8b8-4d69-99a3-ba3af795218c","Type":"ContainerStarted","Data":"b899287e5d34e3c29ac3ca4371233a942568674fbe5a4771581c0313eb03e205"} Oct 03 13:33:09 crc kubenswrapper[4861]: I1003 13:33:09.359718 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-kgpdn" event={"ID":"88db1fac-4c48-400a-9eee-f5c11d8dd12e","Type":"ContainerStarted","Data":"5293ae78d381becf90b3071a8d3b6d747cef1b2fd6ce8847cc64b0899cbb320f"} Oct 03 13:33:09 crc kubenswrapper[4861]: I1003 13:33:09.360906 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-cft42" podStartSLOduration=81.360885858 podStartE2EDuration="1m21.360885858s" podCreationTimestamp="2025-10-03 13:31:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:33:09.357665015 +0000 UTC m=+103.355650062" watchObservedRunningTime="2025-10-03 13:33:09.360885858 +0000 UTC m=+103.358870905" Oct 03 13:33:09 crc kubenswrapper[4861]: I1003 13:33:09.431957 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 13:33:09 crc kubenswrapper[4861]: E1003 13:33:09.432439 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 13:33:09.932409769 +0000 UTC m=+103.930394816 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:33:09 crc kubenswrapper[4861]: I1003 13:33:09.464329 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 03 13:33:09 crc kubenswrapper[4861]: I1003 13:33:09.534938 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/e5c7b582-9486-4cea-af64-01d70c01ad7c-kubelet-dir\") pod \"e5c7b582-9486-4cea-af64-01d70c01ad7c\" (UID: \"e5c7b582-9486-4cea-af64-01d70c01ad7c\") " Oct 03 13:33:09 crc kubenswrapper[4861]: I1003 13:33:09.534987 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e5c7b582-9486-4cea-af64-01d70c01ad7c-kube-api-access\") pod \"e5c7b582-9486-4cea-af64-01d70c01ad7c\" (UID: \"e5c7b582-9486-4cea-af64-01d70c01ad7c\") " Oct 03 13:33:09 crc kubenswrapper[4861]: I1003 13:33:09.535301 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-t6vlm\" (UID: \"2ddecabf-87aa-4eda-93a5-eee5c61d3b91\") " pod="openshift-image-registry/image-registry-697d97f7c8-t6vlm" Oct 03 13:33:09 crc kubenswrapper[4861]: E1003 13:33:09.535604 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 13:33:10.035589865 +0000 UTC m=+104.033574912 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-t6vlm" (UID: "2ddecabf-87aa-4eda-93a5-eee5c61d3b91") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:33:09 crc kubenswrapper[4861]: I1003 13:33:09.535653 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e5c7b582-9486-4cea-af64-01d70c01ad7c-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "e5c7b582-9486-4cea-af64-01d70c01ad7c" (UID: "e5c7b582-9486-4cea-af64-01d70c01ad7c"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 03 13:33:09 crc kubenswrapper[4861]: I1003 13:33:09.550104 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e5c7b582-9486-4cea-af64-01d70c01ad7c-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e5c7b582-9486-4cea-af64-01d70c01ad7c" (UID: "e5c7b582-9486-4cea-af64-01d70c01ad7c"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:33:09 crc kubenswrapper[4861]: I1003 13:33:09.550262 4861 patch_prober.go:28] interesting pod/apiserver-76f77b778f-rkmmm container/openshift-apiserver namespace/openshift-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[+]ping ok Oct 03 13:33:09 crc kubenswrapper[4861]: [+]log ok Oct 03 13:33:09 crc kubenswrapper[4861]: [+]etcd ok Oct 03 13:33:09 crc kubenswrapper[4861]: [+]poststarthook/start-apiserver-admission-initializer ok Oct 03 13:33:09 crc kubenswrapper[4861]: [+]poststarthook/generic-apiserver-start-informers ok Oct 03 13:33:09 crc kubenswrapper[4861]: [+]poststarthook/max-in-flight-filter ok Oct 03 13:33:09 crc kubenswrapper[4861]: [+]poststarthook/storage-object-count-tracker-hook ok Oct 03 13:33:09 crc kubenswrapper[4861]: [+]poststarthook/image.openshift.io-apiserver-caches ok Oct 03 13:33:09 crc kubenswrapper[4861]: [-]poststarthook/authorization.openshift.io-bootstrapclusterroles failed: reason withheld Oct 03 13:33:09 crc kubenswrapper[4861]: [-]poststarthook/authorization.openshift.io-ensurenodebootstrap-sa failed: reason withheld Oct 03 13:33:09 crc kubenswrapper[4861]: [+]poststarthook/project.openshift.io-projectcache ok Oct 03 13:33:09 crc kubenswrapper[4861]: [+]poststarthook/project.openshift.io-projectauthorizationcache ok Oct 03 13:33:09 crc kubenswrapper[4861]: [-]poststarthook/openshift.io-startinformers failed: reason withheld Oct 03 13:33:09 crc kubenswrapper[4861]: [+]poststarthook/openshift.io-restmapperupdater ok Oct 03 13:33:09 crc kubenswrapper[4861]: [+]poststarthook/quota.openshift.io-clusterquotamapping ok Oct 03 13:33:09 crc kubenswrapper[4861]: livez check failed Oct 03 13:33:09 crc kubenswrapper[4861]: I1003 13:33:09.550305 4861 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-apiserver/apiserver-76f77b778f-rkmmm" podUID="802caeaa-43a4-4cca-b946-5b561df185a8" containerName="openshift-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 03 13:33:09 crc kubenswrapper[4861]: I1003 13:33:09.558824 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-69rqg"] Oct 03 13:33:09 crc kubenswrapper[4861]: I1003 13:33:09.641774 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 13:33:09 crc kubenswrapper[4861]: I1003 13:33:09.642220 4861 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/e5c7b582-9486-4cea-af64-01d70c01ad7c-kubelet-dir\") on node \"crc\" DevicePath \"\"" Oct 03 13:33:09 crc kubenswrapper[4861]: I1003 13:33:09.642254 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e5c7b582-9486-4cea-af64-01d70c01ad7c-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 03 13:33:09 crc kubenswrapper[4861]: E1003 13:33:09.642335 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-10-03 13:33:10.142316744 +0000 UTC m=+104.140301781 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:33:09 crc kubenswrapper[4861]: I1003 13:33:09.745582 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-t6vlm\" (UID: \"2ddecabf-87aa-4eda-93a5-eee5c61d3b91\") " pod="openshift-image-registry/image-registry-697d97f7c8-t6vlm" Oct 03 13:33:09 crc kubenswrapper[4861]: E1003 13:33:09.747164 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 13:33:10.247150873 +0000 UTC m=+104.245135920 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-t6vlm" (UID: "2ddecabf-87aa-4eda-93a5-eee5c61d3b91") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:33:09 crc kubenswrapper[4861]: I1003 13:33:09.847635 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 13:33:09 crc kubenswrapper[4861]: E1003 13:33:09.848098 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 13:33:10.348078133 +0000 UTC m=+104.346063180 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:33:09 crc kubenswrapper[4861]: I1003 13:33:09.920094 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Oct 03 13:33:09 crc kubenswrapper[4861]: E1003 13:33:09.920442 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e5c7b582-9486-4cea-af64-01d70c01ad7c" containerName="pruner" Oct 03 13:33:09 crc kubenswrapper[4861]: I1003 13:33:09.920459 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="e5c7b582-9486-4cea-af64-01d70c01ad7c" containerName="pruner" Oct 03 13:33:09 crc kubenswrapper[4861]: I1003 13:33:09.920646 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="e5c7b582-9486-4cea-af64-01d70c01ad7c" containerName="pruner" Oct 03 13:33:09 crc kubenswrapper[4861]: I1003 13:33:09.921309 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 03 13:33:09 crc kubenswrapper[4861]: I1003 13:33:09.927494 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Oct 03 13:33:09 crc kubenswrapper[4861]: I1003 13:33:09.927735 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Oct 03 13:33:09 crc kubenswrapper[4861]: I1003 13:33:09.937782 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-mbpzd"] Oct 03 13:33:09 crc kubenswrapper[4861]: I1003 13:33:09.937832 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Oct 03 13:33:09 crc kubenswrapper[4861]: I1003 13:33:09.949203 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-t6vlm\" (UID: \"2ddecabf-87aa-4eda-93a5-eee5c61d3b91\") " pod="openshift-image-registry/image-registry-697d97f7c8-t6vlm" Oct 03 13:33:09 crc kubenswrapper[4861]: E1003 13:33:09.949528 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 13:33:10.449514055 +0000 UTC m=+104.447499102 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-t6vlm" (UID: "2ddecabf-87aa-4eda-93a5-eee5c61d3b91") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:33:10 crc kubenswrapper[4861]: I1003 13:33:10.043860 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-wmjx9"] Oct 03 13:33:10 crc kubenswrapper[4861]: I1003 13:33:10.051277 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 13:33:10 crc kubenswrapper[4861]: I1003 13:33:10.051843 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f407e73c-b330-4611-9a58-150ae709b761-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"f407e73c-b330-4611-9a58-150ae709b761\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 03 13:33:10 crc kubenswrapper[4861]: I1003 13:33:10.051879 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f407e73c-b330-4611-9a58-150ae709b761-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"f407e73c-b330-4611-9a58-150ae709b761\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 03 13:33:10 crc kubenswrapper[4861]: E1003 13:33:10.051990 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 13:33:10.551971994 +0000 UTC m=+104.549957041 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:33:10 crc kubenswrapper[4861]: I1003 13:33:10.078567 4861 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock" Oct 03 13:33:10 crc kubenswrapper[4861]: I1003 13:33:10.150506 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-nbf9j"] Oct 03 13:33:10 crc kubenswrapper[4861]: I1003 13:33:10.152113 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nbf9j" Oct 03 13:33:10 crc kubenswrapper[4861]: I1003 13:33:10.153514 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-t6vlm\" (UID: \"2ddecabf-87aa-4eda-93a5-eee5c61d3b91\") " pod="openshift-image-registry/image-registry-697d97f7c8-t6vlm" Oct 03 13:33:10 crc kubenswrapper[4861]: I1003 13:33:10.153734 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f407e73c-b330-4611-9a58-150ae709b761-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"f407e73c-b330-4611-9a58-150ae709b761\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 03 13:33:10 crc kubenswrapper[4861]: I1003 13:33:10.153848 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f407e73c-b330-4611-9a58-150ae709b761-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"f407e73c-b330-4611-9a58-150ae709b761\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 03 13:33:10 crc kubenswrapper[4861]: I1003 13:33:10.153922 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f407e73c-b330-4611-9a58-150ae709b761-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"f407e73c-b330-4611-9a58-150ae709b761\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 03 13:33:10 crc kubenswrapper[4861]: E1003 13:33:10.154034 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 13:33:10.654022173 +0000 UTC m=+104.652007210 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-t6vlm" (UID: "2ddecabf-87aa-4eda-93a5-eee5c61d3b91") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:33:10 crc kubenswrapper[4861]: I1003 13:33:10.156562 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Oct 03 13:33:10 crc kubenswrapper[4861]: I1003 13:33:10.173245 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-nbf9j"] Oct 03 13:33:10 crc kubenswrapper[4861]: I1003 13:33:10.184674 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f407e73c-b330-4611-9a58-150ae709b761-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"f407e73c-b330-4611-9a58-150ae709b761\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 03 13:33:10 crc kubenswrapper[4861]: I1003 13:33:10.226781 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-wqcjq"] Oct 03 13:33:10 crc kubenswrapper[4861]: I1003 13:33:10.254838 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 13:33:10 crc kubenswrapper[4861]: I1003 13:33:10.255143 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/299f9a50-345e-4196-b954-1004eb2acd0c-utilities\") pod \"redhat-marketplace-nbf9j\" (UID: \"299f9a50-345e-4196-b954-1004eb2acd0c\") " pod="openshift-marketplace/redhat-marketplace-nbf9j" Oct 03 13:33:10 crc kubenswrapper[4861]: I1003 13:33:10.255181 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/299f9a50-345e-4196-b954-1004eb2acd0c-catalog-content\") pod \"redhat-marketplace-nbf9j\" (UID: \"299f9a50-345e-4196-b954-1004eb2acd0c\") " pod="openshift-marketplace/redhat-marketplace-nbf9j" Oct 03 13:33:10 crc kubenswrapper[4861]: I1003 13:33:10.255220 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vw66w\" (UniqueName: \"kubernetes.io/projected/299f9a50-345e-4196-b954-1004eb2acd0c-kube-api-access-vw66w\") pod \"redhat-marketplace-nbf9j\" (UID: \"299f9a50-345e-4196-b954-1004eb2acd0c\") " pod="openshift-marketplace/redhat-marketplace-nbf9j" Oct 03 13:33:10 crc kubenswrapper[4861]: E1003 13:33:10.255303 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 13:33:10.755270421 +0000 UTC m=+104.753255468 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:33:10 crc kubenswrapper[4861]: I1003 13:33:10.262608 4861 patch_prober.go:28] interesting pod/router-default-5444994796-46jp8 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 03 13:33:10 crc kubenswrapper[4861]: [-]has-synced failed: reason withheld Oct 03 13:33:10 crc kubenswrapper[4861]: [+]process-running ok Oct 03 13:33:10 crc kubenswrapper[4861]: healthz check failed Oct 03 13:33:10 crc kubenswrapper[4861]: I1003 13:33:10.262649 4861 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-46jp8" podUID="dd1fcc3e-9164-4f36-8082-8458a06f6ce9" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 03 13:33:10 crc kubenswrapper[4861]: I1003 13:33:10.301365 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 03 13:33:10 crc kubenswrapper[4861]: I1003 13:33:10.356010 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/299f9a50-345e-4196-b954-1004eb2acd0c-utilities\") pod \"redhat-marketplace-nbf9j\" (UID: \"299f9a50-345e-4196-b954-1004eb2acd0c\") " pod="openshift-marketplace/redhat-marketplace-nbf9j" Oct 03 13:33:10 crc kubenswrapper[4861]: I1003 13:33:10.356052 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/299f9a50-345e-4196-b954-1004eb2acd0c-catalog-content\") pod \"redhat-marketplace-nbf9j\" (UID: \"299f9a50-345e-4196-b954-1004eb2acd0c\") " pod="openshift-marketplace/redhat-marketplace-nbf9j" Oct 03 13:33:10 crc kubenswrapper[4861]: I1003 13:33:10.356084 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vw66w\" (UniqueName: \"kubernetes.io/projected/299f9a50-345e-4196-b954-1004eb2acd0c-kube-api-access-vw66w\") pod \"redhat-marketplace-nbf9j\" (UID: \"299f9a50-345e-4196-b954-1004eb2acd0c\") " pod="openshift-marketplace/redhat-marketplace-nbf9j" Oct 03 13:33:10 crc kubenswrapper[4861]: I1003 13:33:10.356135 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-t6vlm\" (UID: \"2ddecabf-87aa-4eda-93a5-eee5c61d3b91\") " pod="openshift-image-registry/image-registry-697d97f7c8-t6vlm" Oct 03 13:33:10 crc kubenswrapper[4861]: E1003 13:33:10.356431 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 13:33:10.856420747 +0000 UTC m=+104.854405794 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-t6vlm" (UID: "2ddecabf-87aa-4eda-93a5-eee5c61d3b91") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:33:10 crc kubenswrapper[4861]: I1003 13:33:10.356894 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/299f9a50-345e-4196-b954-1004eb2acd0c-utilities\") pod \"redhat-marketplace-nbf9j\" (UID: \"299f9a50-345e-4196-b954-1004eb2acd0c\") " pod="openshift-marketplace/redhat-marketplace-nbf9j" Oct 03 13:33:10 crc kubenswrapper[4861]: I1003 13:33:10.357094 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/299f9a50-345e-4196-b954-1004eb2acd0c-catalog-content\") pod \"redhat-marketplace-nbf9j\" (UID: \"299f9a50-345e-4196-b954-1004eb2acd0c\") " pod="openshift-marketplace/redhat-marketplace-nbf9j" Oct 03 13:33:10 crc kubenswrapper[4861]: I1003 13:33:10.371027 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 03 13:33:10 crc kubenswrapper[4861]: I1003 13:33:10.371595 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"e5c7b582-9486-4cea-af64-01d70c01ad7c","Type":"ContainerDied","Data":"1b45864a6d21b682b44b52ffffe92425a2916df423276798069a1f41fb1f09a4"} Oct 03 13:33:10 crc kubenswrapper[4861]: I1003 13:33:10.371632 4861 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1b45864a6d21b682b44b52ffffe92425a2916df423276798069a1f41fb1f09a4" Oct 03 13:33:10 crc kubenswrapper[4861]: I1003 13:33:10.377969 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vw66w\" (UniqueName: \"kubernetes.io/projected/299f9a50-345e-4196-b954-1004eb2acd0c-kube-api-access-vw66w\") pod \"redhat-marketplace-nbf9j\" (UID: \"299f9a50-345e-4196-b954-1004eb2acd0c\") " pod="openshift-marketplace/redhat-marketplace-nbf9j" Oct 03 13:33:10 crc kubenswrapper[4861]: I1003 13:33:10.379098 4861 generic.go:334] "Generic (PLEG): container finished" podID="0c3c7089-d98e-4504-91c2-27851ed21d16" containerID="b9ea19e2e924a668a8c9fd3450ddc14d94b6a5f9c680b84da2a77ad9e4691963" exitCode=0 Oct 03 13:33:10 crc kubenswrapper[4861]: I1003 13:33:10.379160 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29324970-q4nf6" event={"ID":"0c3c7089-d98e-4504-91c2-27851ed21d16","Type":"ContainerDied","Data":"b9ea19e2e924a668a8c9fd3450ddc14d94b6a5f9c680b84da2a77ad9e4691963"} Oct 03 13:33:10 crc kubenswrapper[4861]: I1003 13:33:10.394788 4861 generic.go:334] "Generic (PLEG): container finished" podID="b06b44ef-af9a-4253-8f45-44d98adc49bf" containerID="a7deffce8358fd5fc414c099c19a02132dccfce0ea4ea378aa40163ae116ab32" exitCode=0 Oct 03 13:33:10 crc kubenswrapper[4861]: I1003 13:33:10.394871 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mbpzd" event={"ID":"b06b44ef-af9a-4253-8f45-44d98adc49bf","Type":"ContainerDied","Data":"a7deffce8358fd5fc414c099c19a02132dccfce0ea4ea378aa40163ae116ab32"} Oct 03 
13:33:10 crc kubenswrapper[4861]: I1003 13:33:10.394924 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mbpzd" event={"ID":"b06b44ef-af9a-4253-8f45-44d98adc49bf","Type":"ContainerStarted","Data":"fc1ae8e36f66dd86e8a17966eddc8838ebf87900d7be613a4cd7575151b8ccb8"} Oct 03 13:33:10 crc kubenswrapper[4861]: I1003 13:33:10.399685 4861 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 03 13:33:10 crc kubenswrapper[4861]: I1003 13:33:10.411863 4861 generic.go:334] "Generic (PLEG): container finished" podID="3656f402-8d9e-4401-83a5-8367adb5b0f0" containerID="9ff5726b5a3ac2f89e4292af71ddcd6f1eac8b092348b249458e3a4a61ccafda" exitCode=0 Oct 03 13:33:10 crc kubenswrapper[4861]: I1003 13:33:10.411939 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-69rqg" event={"ID":"3656f402-8d9e-4401-83a5-8367adb5b0f0","Type":"ContainerDied","Data":"9ff5726b5a3ac2f89e4292af71ddcd6f1eac8b092348b249458e3a4a61ccafda"} Oct 03 13:33:10 crc kubenswrapper[4861]: I1003 13:33:10.411966 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-69rqg" event={"ID":"3656f402-8d9e-4401-83a5-8367adb5b0f0","Type":"ContainerStarted","Data":"b6065b0742374c202b8508ffbe7457f186222cd96b0c138f89c00d652cc74c28"} Oct 03 13:33:10 crc kubenswrapper[4861]: I1003 13:33:10.421959 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wqcjq" event={"ID":"ee4ad52b-0124-4f26-8780-cd8c7e6657b5","Type":"ContainerStarted","Data":"b03e0ab13b39a3ea2b81fa828ee65cff8b45170bd81b51918e0f59f45b6f102b"} Oct 03 13:33:10 crc kubenswrapper[4861]: I1003 13:33:10.449651 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-kgpdn" event={"ID":"88db1fac-4c48-400a-9eee-f5c11d8dd12e","Type":"ContainerStarted","Data":"e769a6b1e54b76987fb216eb213b9986c62a92f914ce2c1fa106b2df638f4517"} Oct 03 13:33:10 crc kubenswrapper[4861]: I1003 13:33:10.455486 4861 generic.go:334] "Generic (PLEG): container finished" podID="bc19f2ef-da47-4f43-8281-0fa924546c1b" containerID="70f366fb927b16243a4ede84730e9674ef1403764f76e5008908734cdcb7d28a" exitCode=0 Oct 03 13:33:10 crc kubenswrapper[4861]: I1003 13:33:10.456759 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 13:33:10 crc kubenswrapper[4861]: I1003 13:33:10.458545 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wmjx9" event={"ID":"bc19f2ef-da47-4f43-8281-0fa924546c1b","Type":"ContainerDied","Data":"70f366fb927b16243a4ede84730e9674ef1403764f76e5008908734cdcb7d28a"} Oct 03 13:33:10 crc kubenswrapper[4861]: I1003 13:33:10.458597 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wmjx9" event={"ID":"bc19f2ef-da47-4f43-8281-0fa924546c1b","Type":"ContainerStarted","Data":"2616696ce8e1df3903a77d889ad54ca4f560ddce9a653d17670846921979a7dc"} Oct 03 13:33:10 crc kubenswrapper[4861]: E1003 13:33:10.458681 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 
podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 13:33:10.95863668 +0000 UTC m=+104.956621727 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:33:10 crc kubenswrapper[4861]: I1003 13:33:10.475292 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nbf9j" Oct 03 13:33:10 crc kubenswrapper[4861]: I1003 13:33:10.506902 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-kgpdn" podStartSLOduration=15.506876398 podStartE2EDuration="15.506876398s" podCreationTimestamp="2025-10-03 13:32:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:33:10.49714523 +0000 UTC m=+104.495130287" watchObservedRunningTime="2025-10-03 13:33:10.506876398 +0000 UTC m=+104.504861455" Oct 03 13:33:10 crc kubenswrapper[4861]: I1003 13:33:10.558888 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-t6vlm\" (UID: \"2ddecabf-87aa-4eda-93a5-eee5c61d3b91\") " pod="openshift-image-registry/image-registry-697d97f7c8-t6vlm" Oct 03 13:33:10 crc kubenswrapper[4861]: I1003 13:33:10.559314 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-76l5p"] Oct 03 13:33:10 crc kubenswrapper[4861]: E1003 13:33:10.560595 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 13:33:11.060578445 +0000 UTC m=+105.058563532 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-t6vlm" (UID: "2ddecabf-87aa-4eda-93a5-eee5c61d3b91") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:33:10 crc kubenswrapper[4861]: I1003 13:33:10.561290 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-76l5p" Oct 03 13:33:10 crc kubenswrapper[4861]: I1003 13:33:10.564369 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-76l5p"] Oct 03 13:33:10 crc kubenswrapper[4861]: I1003 13:33:10.661313 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 13:33:10 crc kubenswrapper[4861]: E1003 13:33:10.661525 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 13:33:11.161491605 +0000 UTC m=+105.159476652 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:33:10 crc kubenswrapper[4861]: I1003 13:33:10.661598 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5vsrp\" (UniqueName: \"kubernetes.io/projected/f4dd945c-0af6-46cc-92b5-91768370d81e-kube-api-access-5vsrp\") pod \"redhat-marketplace-76l5p\" (UID: \"f4dd945c-0af6-46cc-92b5-91768370d81e\") " pod="openshift-marketplace/redhat-marketplace-76l5p" Oct 03 13:33:10 crc kubenswrapper[4861]: I1003 13:33:10.661633 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f4dd945c-0af6-46cc-92b5-91768370d81e-catalog-content\") pod \"redhat-marketplace-76l5p\" (UID: \"f4dd945c-0af6-46cc-92b5-91768370d81e\") " pod="openshift-marketplace/redhat-marketplace-76l5p" Oct 03 13:33:10 crc kubenswrapper[4861]: I1003 13:33:10.661659 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f4dd945c-0af6-46cc-92b5-91768370d81e-utilities\") pod \"redhat-marketplace-76l5p\" (UID: \"f4dd945c-0af6-46cc-92b5-91768370d81e\") " pod="openshift-marketplace/redhat-marketplace-76l5p" Oct 03 13:33:10 crc kubenswrapper[4861]: I1003 13:33:10.661680 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-t6vlm\" (UID: \"2ddecabf-87aa-4eda-93a5-eee5c61d3b91\") " pod="openshift-image-registry/image-registry-697d97f7c8-t6vlm" Oct 03 13:33:10 crc kubenswrapper[4861]: E1003 13:33:10.661938 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 13:33:11.161927456 +0000 UTC m=+105.159912503 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-t6vlm" (UID: "2ddecabf-87aa-4eda-93a5-eee5c61d3b91") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:33:10 crc kubenswrapper[4861]: I1003 13:33:10.707445 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Oct 03 13:33:10 crc kubenswrapper[4861]: I1003 13:33:10.762692 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 13:33:10 crc kubenswrapper[4861]: E1003 13:33:10.762901 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 13:33:11.262871346 +0000 UTC m=+105.260856403 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:33:10 crc kubenswrapper[4861]: I1003 13:33:10.763001 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f4dd945c-0af6-46cc-92b5-91768370d81e-catalog-content\") pod \"redhat-marketplace-76l5p\" (UID: \"f4dd945c-0af6-46cc-92b5-91768370d81e\") " pod="openshift-marketplace/redhat-marketplace-76l5p" Oct 03 13:33:10 crc kubenswrapper[4861]: I1003 13:33:10.763048 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f4dd945c-0af6-46cc-92b5-91768370d81e-utilities\") pod \"redhat-marketplace-76l5p\" (UID: \"f4dd945c-0af6-46cc-92b5-91768370d81e\") " pod="openshift-marketplace/redhat-marketplace-76l5p" Oct 03 13:33:10 crc kubenswrapper[4861]: I1003 13:33:10.763077 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-t6vlm\" (UID: \"2ddecabf-87aa-4eda-93a5-eee5c61d3b91\") " pod="openshift-image-registry/image-registry-697d97f7c8-t6vlm" Oct 03 13:33:10 crc kubenswrapper[4861]: I1003 13:33:10.763181 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5vsrp\" (UniqueName: \"kubernetes.io/projected/f4dd945c-0af6-46cc-92b5-91768370d81e-kube-api-access-5vsrp\") pod \"redhat-marketplace-76l5p\" (UID: \"f4dd945c-0af6-46cc-92b5-91768370d81e\") " pod="openshift-marketplace/redhat-marketplace-76l5p" Oct 03 13:33:10 crc kubenswrapper[4861]: I1003 
13:33:10.763684 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f4dd945c-0af6-46cc-92b5-91768370d81e-catalog-content\") pod \"redhat-marketplace-76l5p\" (UID: \"f4dd945c-0af6-46cc-92b5-91768370d81e\") " pod="openshift-marketplace/redhat-marketplace-76l5p" Oct 03 13:33:10 crc kubenswrapper[4861]: I1003 13:33:10.763839 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f4dd945c-0af6-46cc-92b5-91768370d81e-utilities\") pod \"redhat-marketplace-76l5p\" (UID: \"f4dd945c-0af6-46cc-92b5-91768370d81e\") " pod="openshift-marketplace/redhat-marketplace-76l5p" Oct 03 13:33:10 crc kubenswrapper[4861]: E1003 13:33:10.764070 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 13:33:11.264057867 +0000 UTC m=+105.262042914 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-t6vlm" (UID: "2ddecabf-87aa-4eda-93a5-eee5c61d3b91") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:33:10 crc kubenswrapper[4861]: I1003 13:33:10.809359 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5vsrp\" (UniqueName: \"kubernetes.io/projected/f4dd945c-0af6-46cc-92b5-91768370d81e-kube-api-access-5vsrp\") pod \"redhat-marketplace-76l5p\" (UID: \"f4dd945c-0af6-46cc-92b5-91768370d81e\") " pod="openshift-marketplace/redhat-marketplace-76l5p" Oct 03 13:33:10 crc kubenswrapper[4861]: I1003 13:33:10.865488 4861 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2025-10-03T13:33:10.078820508Z","Handler":null,"Name":""} Oct 03 13:33:10 crc kubenswrapper[4861]: I1003 13:33:10.868161 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 13:33:10 crc kubenswrapper[4861]: E1003 13:33:10.868370 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 13:33:11.368322791 +0000 UTC m=+105.366307838 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:33:10 crc kubenswrapper[4861]: I1003 13:33:10.868588 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-t6vlm\" (UID: \"2ddecabf-87aa-4eda-93a5-eee5c61d3b91\") " pod="openshift-image-registry/image-registry-697d97f7c8-t6vlm" Oct 03 13:33:10 crc kubenswrapper[4861]: E1003 13:33:10.868867 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 13:33:11.368855755 +0000 UTC m=+105.366840802 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-t6vlm" (UID: "2ddecabf-87aa-4eda-93a5-eee5c61d3b91") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 13:33:10 crc kubenswrapper[4861]: I1003 13:33:10.877397 4861 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0 Oct 03 13:33:10 crc kubenswrapper[4861]: I1003 13:33:10.877739 4861 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock Oct 03 13:33:10 crc kubenswrapper[4861]: I1003 13:33:10.881272 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-nbf9j"] Oct 03 13:33:10 crc kubenswrapper[4861]: I1003 13:33:10.893522 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-76l5p" Oct 03 13:33:10 crc kubenswrapper[4861]: I1003 13:33:10.970837 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 13:33:11 crc kubenswrapper[4861]: I1003 13:33:11.049078 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". 
PluginName "kubernetes.io/csi", VolumeGidValue "" Oct 03 13:33:11 crc kubenswrapper[4861]: I1003 13:33:11.072032 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-t6vlm\" (UID: \"2ddecabf-87aa-4eda-93a5-eee5c61d3b91\") " pod="openshift-image-registry/image-registry-697d97f7c8-t6vlm" Oct 03 13:33:11 crc kubenswrapper[4861]: I1003 13:33:11.115653 4861 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Oct 03 13:33:11 crc kubenswrapper[4861]: I1003 13:33:11.115692 4861 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-t6vlm\" (UID: \"2ddecabf-87aa-4eda-93a5-eee5c61d3b91\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-t6vlm" Oct 03 13:33:11 crc kubenswrapper[4861]: I1003 13:33:11.153073 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-whv8v"] Oct 03 13:33:11 crc kubenswrapper[4861]: I1003 13:33:11.155099 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-whv8v" Oct 03 13:33:11 crc kubenswrapper[4861]: I1003 13:33:11.164950 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Oct 03 13:33:11 crc kubenswrapper[4861]: I1003 13:33:11.167107 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-whv8v"] Oct 03 13:33:11 crc kubenswrapper[4861]: I1003 13:33:11.254454 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-t6vlm\" (UID: \"2ddecabf-87aa-4eda-93a5-eee5c61d3b91\") " pod="openshift-image-registry/image-registry-697d97f7c8-t6vlm" Oct 03 13:33:11 crc kubenswrapper[4861]: I1003 13:33:11.264694 4861 patch_prober.go:28] interesting pod/router-default-5444994796-46jp8 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 03 13:33:11 crc kubenswrapper[4861]: [-]has-synced failed: reason withheld Oct 03 13:33:11 crc kubenswrapper[4861]: [+]process-running ok Oct 03 13:33:11 crc kubenswrapper[4861]: healthz check failed Oct 03 13:33:11 crc kubenswrapper[4861]: I1003 13:33:11.265101 4861 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-46jp8" podUID="dd1fcc3e-9164-4f36-8082-8458a06f6ce9" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 03 13:33:11 crc kubenswrapper[4861]: I1003 13:33:11.275709 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/c53f0f06-1b25-44b6-9797-1181e5c79ecf-catalog-content\") pod \"redhat-operators-whv8v\" (UID: \"c53f0f06-1b25-44b6-9797-1181e5c79ecf\") " pod="openshift-marketplace/redhat-operators-whv8v" Oct 03 13:33:11 crc kubenswrapper[4861]: I1003 13:33:11.275807 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9crkp\" (UniqueName: \"kubernetes.io/projected/c53f0f06-1b25-44b6-9797-1181e5c79ecf-kube-api-access-9crkp\") pod \"redhat-operators-whv8v\" (UID: \"c53f0f06-1b25-44b6-9797-1181e5c79ecf\") " pod="openshift-marketplace/redhat-operators-whv8v" Oct 03 13:33:11 crc kubenswrapper[4861]: I1003 13:33:11.275841 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c53f0f06-1b25-44b6-9797-1181e5c79ecf-utilities\") pod \"redhat-operators-whv8v\" (UID: \"c53f0f06-1b25-44b6-9797-1181e5c79ecf\") " pod="openshift-marketplace/redhat-operators-whv8v" Oct 03 13:33:11 crc kubenswrapper[4861]: I1003 13:33:11.354870 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-8wtxc"] Oct 03 13:33:11 crc kubenswrapper[4861]: I1003 13:33:11.356353 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-8wtxc" Oct 03 13:33:11 crc kubenswrapper[4861]: I1003 13:33:11.378192 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/379a9149-fcc4-4958-81b1-a177b2afb908-utilities\") pod \"redhat-operators-8wtxc\" (UID: \"379a9149-fcc4-4958-81b1-a177b2afb908\") " pod="openshift-marketplace/redhat-operators-8wtxc" Oct 03 13:33:11 crc kubenswrapper[4861]: I1003 13:33:11.378297 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c53f0f06-1b25-44b6-9797-1181e5c79ecf-catalog-content\") pod \"redhat-operators-whv8v\" (UID: \"c53f0f06-1b25-44b6-9797-1181e5c79ecf\") " pod="openshift-marketplace/redhat-operators-whv8v" Oct 03 13:33:11 crc kubenswrapper[4861]: I1003 13:33:11.378322 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/379a9149-fcc4-4958-81b1-a177b2afb908-catalog-content\") pod \"redhat-operators-8wtxc\" (UID: \"379a9149-fcc4-4958-81b1-a177b2afb908\") " pod="openshift-marketplace/redhat-operators-8wtxc" Oct 03 13:33:11 crc kubenswrapper[4861]: I1003 13:33:11.378349 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9hmq5\" (UniqueName: \"kubernetes.io/projected/379a9149-fcc4-4958-81b1-a177b2afb908-kube-api-access-9hmq5\") pod \"redhat-operators-8wtxc\" (UID: \"379a9149-fcc4-4958-81b1-a177b2afb908\") " pod="openshift-marketplace/redhat-operators-8wtxc" Oct 03 13:33:11 crc kubenswrapper[4861]: I1003 13:33:11.378385 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9crkp\" (UniqueName: \"kubernetes.io/projected/c53f0f06-1b25-44b6-9797-1181e5c79ecf-kube-api-access-9crkp\") pod \"redhat-operators-whv8v\" (UID: \"c53f0f06-1b25-44b6-9797-1181e5c79ecf\") " pod="openshift-marketplace/redhat-operators-whv8v" Oct 03 13:33:11 crc kubenswrapper[4861]: I1003 13:33:11.378406 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c53f0f06-1b25-44b6-9797-1181e5c79ecf-utilities\") pod \"redhat-operators-whv8v\" (UID: \"c53f0f06-1b25-44b6-9797-1181e5c79ecf\") " pod="openshift-marketplace/redhat-operators-whv8v" Oct 03 13:33:11 crc kubenswrapper[4861]: I1003 13:33:11.378910 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c53f0f06-1b25-44b6-9797-1181e5c79ecf-utilities\") pod \"redhat-operators-whv8v\" (UID: \"c53f0f06-1b25-44b6-9797-1181e5c79ecf\") " pod="openshift-marketplace/redhat-operators-whv8v" Oct 03 13:33:11 crc kubenswrapper[4861]: I1003 13:33:11.379107 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c53f0f06-1b25-44b6-9797-1181e5c79ecf-catalog-content\") pod \"redhat-operators-whv8v\" (UID: \"c53f0f06-1b25-44b6-9797-1181e5c79ecf\") " pod="openshift-marketplace/redhat-operators-whv8v" Oct 03 13:33:11 crc kubenswrapper[4861]: I1003 13:33:11.382039 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-76l5p"] Oct 03 13:33:11 crc kubenswrapper[4861]: I1003 13:33:11.390625 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-8wtxc"] Oct 03 13:33:11 crc kubenswrapper[4861]: I1003 13:33:11.393553 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-t6vlm" Oct 03 13:33:11 crc kubenswrapper[4861]: I1003 13:33:11.406197 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9crkp\" (UniqueName: \"kubernetes.io/projected/c53f0f06-1b25-44b6-9797-1181e5c79ecf-kube-api-access-9crkp\") pod \"redhat-operators-whv8v\" (UID: \"c53f0f06-1b25-44b6-9797-1181e5c79ecf\") " pod="openshift-marketplace/redhat-operators-whv8v" Oct 03 13:33:11 crc kubenswrapper[4861]: I1003 13:33:11.467835 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"f407e73c-b330-4611-9a58-150ae709b761","Type":"ContainerStarted","Data":"7496f1e5d06538ed7c9b3d481c343e6233d5c9450c7ae0ee2c29ef598c04d768"} Oct 03 13:33:11 crc kubenswrapper[4861]: I1003 13:33:11.467889 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"f407e73c-b330-4611-9a58-150ae709b761","Type":"ContainerStarted","Data":"0619a406009387fcfdecd78edcec03395320ca1ad0c264b3dc1fe8053263c18a"} Oct 03 13:33:11 crc kubenswrapper[4861]: I1003 13:33:11.472604 4861 generic.go:334] "Generic (PLEG): container finished" podID="299f9a50-345e-4196-b954-1004eb2acd0c" containerID="8accb3533b51ad509147e372ac6b5b5370dd1ef3f1b1e81232269dac3f31be02" exitCode=0 Oct 03 13:33:11 crc kubenswrapper[4861]: I1003 13:33:11.472673 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nbf9j" event={"ID":"299f9a50-345e-4196-b954-1004eb2acd0c","Type":"ContainerDied","Data":"8accb3533b51ad509147e372ac6b5b5370dd1ef3f1b1e81232269dac3f31be02"} Oct 03 13:33:11 crc kubenswrapper[4861]: I1003 13:33:11.472706 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nbf9j" event={"ID":"299f9a50-345e-4196-b954-1004eb2acd0c","Type":"ContainerStarted","Data":"8f14c91771cf75a641155d6934105486b079123222c5814297b538cd03510221"} Oct 03 13:33:11 crc kubenswrapper[4861]: I1003 13:33:11.475590 4861 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-76l5p" event={"ID":"f4dd945c-0af6-46cc-92b5-91768370d81e","Type":"ContainerStarted","Data":"a59c5e8acb72e38947f7d3bddc74a52ab04c80e1c8a22afbbf006331a1d92f8a"} Oct 03 13:33:11 crc kubenswrapper[4861]: I1003 13:33:11.479838 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/379a9149-fcc4-4958-81b1-a177b2afb908-catalog-content\") pod \"redhat-operators-8wtxc\" (UID: \"379a9149-fcc4-4958-81b1-a177b2afb908\") " pod="openshift-marketplace/redhat-operators-8wtxc" Oct 03 13:33:11 crc kubenswrapper[4861]: I1003 13:33:11.479890 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9hmq5\" (UniqueName: \"kubernetes.io/projected/379a9149-fcc4-4958-81b1-a177b2afb908-kube-api-access-9hmq5\") pod \"redhat-operators-8wtxc\" (UID: \"379a9149-fcc4-4958-81b1-a177b2afb908\") " pod="openshift-marketplace/redhat-operators-8wtxc" Oct 03 13:33:11 crc kubenswrapper[4861]: I1003 13:33:11.480003 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/379a9149-fcc4-4958-81b1-a177b2afb908-utilities\") pod \"redhat-operators-8wtxc\" (UID: \"379a9149-fcc4-4958-81b1-a177b2afb908\") " pod="openshift-marketplace/redhat-operators-8wtxc" Oct 03 13:33:11 crc kubenswrapper[4861]: I1003 13:33:11.480585 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/379a9149-fcc4-4958-81b1-a177b2afb908-utilities\") pod \"redhat-operators-8wtxc\" (UID: \"379a9149-fcc4-4958-81b1-a177b2afb908\") " pod="openshift-marketplace/redhat-operators-8wtxc" Oct 03 13:33:11 crc kubenswrapper[4861]: I1003 13:33:11.480849 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/379a9149-fcc4-4958-81b1-a177b2afb908-catalog-content\") pod \"redhat-operators-8wtxc\" (UID: \"379a9149-fcc4-4958-81b1-a177b2afb908\") " pod="openshift-marketplace/redhat-operators-8wtxc" Oct 03 13:33:11 crc kubenswrapper[4861]: I1003 13:33:11.487518 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-whv8v" Oct 03 13:33:11 crc kubenswrapper[4861]: I1003 13:33:11.488219 4861 generic.go:334] "Generic (PLEG): container finished" podID="ee4ad52b-0124-4f26-8780-cd8c7e6657b5" containerID="c2eb08273bc6a2b5406aa0284dccb13aec9f44dd7d1714c29641afde639eca1e" exitCode=0 Oct 03 13:33:11 crc kubenswrapper[4861]: I1003 13:33:11.489147 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wqcjq" event={"ID":"ee4ad52b-0124-4f26-8780-cd8c7e6657b5","Type":"ContainerDied","Data":"c2eb08273bc6a2b5406aa0284dccb13aec9f44dd7d1714c29641afde639eca1e"} Oct 03 13:33:11 crc kubenswrapper[4861]: I1003 13:33:11.510784 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9hmq5\" (UniqueName: \"kubernetes.io/projected/379a9149-fcc4-4958-81b1-a177b2afb908-kube-api-access-9hmq5\") pod \"redhat-operators-8wtxc\" (UID: \"379a9149-fcc4-4958-81b1-a177b2afb908\") " pod="openshift-marketplace/redhat-operators-8wtxc" Oct 03 13:33:11 crc kubenswrapper[4861]: I1003 13:33:11.519481 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-8-crc" podStartSLOduration=2.519463621 podStartE2EDuration="2.519463621s" podCreationTimestamp="2025-10-03 13:33:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:33:11.491000166 +0000 UTC m=+105.488985293" watchObservedRunningTime="2025-10-03 13:33:11.519463621 +0000 UTC m=+105.517448668" Oct 03 13:33:11 crc kubenswrapper[4861]: I1003 13:33:11.685466 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-8wtxc" Oct 03 13:33:11 crc kubenswrapper[4861]: I1003 13:33:11.866654 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29324970-q4nf6" Oct 03 13:33:11 crc kubenswrapper[4861]: I1003 13:33:11.896842 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/0c3c7089-d98e-4504-91c2-27851ed21d16-secret-volume\") pod \"0c3c7089-d98e-4504-91c2-27851ed21d16\" (UID: \"0c3c7089-d98e-4504-91c2-27851ed21d16\") " Oct 03 13:33:11 crc kubenswrapper[4861]: I1003 13:33:11.896929 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0c3c7089-d98e-4504-91c2-27851ed21d16-config-volume\") pod \"0c3c7089-d98e-4504-91c2-27851ed21d16\" (UID: \"0c3c7089-d98e-4504-91c2-27851ed21d16\") " Oct 03 13:33:11 crc kubenswrapper[4861]: I1003 13:33:11.897086 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tc2sr\" (UniqueName: \"kubernetes.io/projected/0c3c7089-d98e-4504-91c2-27851ed21d16-kube-api-access-tc2sr\") pod \"0c3c7089-d98e-4504-91c2-27851ed21d16\" (UID: \"0c3c7089-d98e-4504-91c2-27851ed21d16\") " Oct 03 13:33:11 crc kubenswrapper[4861]: I1003 13:33:11.900399 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0c3c7089-d98e-4504-91c2-27851ed21d16-config-volume" (OuterVolumeSpecName: "config-volume") pod "0c3c7089-d98e-4504-91c2-27851ed21d16" (UID: "0c3c7089-d98e-4504-91c2-27851ed21d16"). InnerVolumeSpecName "config-volume". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:33:11 crc kubenswrapper[4861]: I1003 13:33:11.904925 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0c3c7089-d98e-4504-91c2-27851ed21d16-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "0c3c7089-d98e-4504-91c2-27851ed21d16" (UID: "0c3c7089-d98e-4504-91c2-27851ed21d16"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:33:11 crc kubenswrapper[4861]: I1003 13:33:11.918106 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-t6vlm"] Oct 03 13:33:11 crc kubenswrapper[4861]: I1003 13:33:11.937448 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0c3c7089-d98e-4504-91c2-27851ed21d16-kube-api-access-tc2sr" (OuterVolumeSpecName: "kube-api-access-tc2sr") pod "0c3c7089-d98e-4504-91c2-27851ed21d16" (UID: "0c3c7089-d98e-4504-91c2-27851ed21d16"). InnerVolumeSpecName "kube-api-access-tc2sr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:33:12 crc kubenswrapper[4861]: I1003 13:33:12.002061 4861 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/0c3c7089-d98e-4504-91c2-27851ed21d16-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 03 13:33:12 crc kubenswrapper[4861]: I1003 13:33:12.002091 4861 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0c3c7089-d98e-4504-91c2-27851ed21d16-config-volume\") on node \"crc\" DevicePath \"\"" Oct 03 13:33:12 crc kubenswrapper[4861]: I1003 13:33:12.002104 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tc2sr\" (UniqueName: \"kubernetes.io/projected/0c3c7089-d98e-4504-91c2-27851ed21d16-kube-api-access-tc2sr\") on node \"crc\" DevicePath \"\"" Oct 03 13:33:12 crc kubenswrapper[4861]: I1003 13:33:12.184197 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-whv8v"] Oct 03 13:33:12 crc kubenswrapper[4861]: I1003 13:33:12.217871 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-8wtxc"] Oct 03 13:33:12 crc kubenswrapper[4861]: W1003 13:33:12.230086 4861 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod379a9149_fcc4_4958_81b1_a177b2afb908.slice/crio-68eabdc81d584b94f848290523d79bf281f9baf23efcf7241c732ea10b79cae3 WatchSource:0}: Error finding container 68eabdc81d584b94f848290523d79bf281f9baf23efcf7241c732ea10b79cae3: Status 404 returned error can't find the container with id 68eabdc81d584b94f848290523d79bf281f9baf23efcf7241c732ea10b79cae3 Oct 03 13:33:12 crc kubenswrapper[4861]: I1003 13:33:12.264147 4861 patch_prober.go:28] interesting pod/router-default-5444994796-46jp8 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 03 13:33:12 crc kubenswrapper[4861]: [-]has-synced failed: reason withheld Oct 03 13:33:12 crc kubenswrapper[4861]: [+]process-running ok Oct 03 13:33:12 crc kubenswrapper[4861]: healthz check failed Oct 03 13:33:12 crc kubenswrapper[4861]: I1003 13:33:12.264199 4861 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-46jp8" 
podUID="dd1fcc3e-9164-4f36-8082-8458a06f6ce9" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 03 13:33:12 crc kubenswrapper[4861]: I1003 13:33:12.504942 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29324970-q4nf6" event={"ID":"0c3c7089-d98e-4504-91c2-27851ed21d16","Type":"ContainerDied","Data":"907a8d3b740c70a318d513e8f58fe3218eb1412cf271da766aee70577798d5e6"} Oct 03 13:33:12 crc kubenswrapper[4861]: I1003 13:33:12.505016 4861 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="907a8d3b740c70a318d513e8f58fe3218eb1412cf271da766aee70577798d5e6" Oct 03 13:33:12 crc kubenswrapper[4861]: I1003 13:33:12.505087 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29324970-q4nf6" Oct 03 13:33:12 crc kubenswrapper[4861]: I1003 13:33:12.507995 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-whv8v" event={"ID":"c53f0f06-1b25-44b6-9797-1181e5c79ecf","Type":"ContainerStarted","Data":"c2eb7c354fab8a7e55dda4c6bc2f314cf63872406fe67dab48866e6ef4cf8067"} Oct 03 13:33:12 crc kubenswrapper[4861]: I1003 13:33:12.509551 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-t6vlm" event={"ID":"2ddecabf-87aa-4eda-93a5-eee5c61d3b91","Type":"ContainerStarted","Data":"789f3d6042afd6b0852c4c1b40373543030c212fe1dc4c028c60578f294c16b8"} Oct 03 13:33:12 crc kubenswrapper[4861]: I1003 13:33:12.509574 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-t6vlm" event={"ID":"2ddecabf-87aa-4eda-93a5-eee5c61d3b91","Type":"ContainerStarted","Data":"3796e43f8075237131e19dd0668c9c8479c97bc6b3bb5104757e6ce6a495d5de"} Oct 03 13:33:12 crc kubenswrapper[4861]: I1003 13:33:12.511210 4861 generic.go:334] "Generic (PLEG): container finished" podID="f407e73c-b330-4611-9a58-150ae709b761" containerID="7496f1e5d06538ed7c9b3d481c343e6233d5c9450c7ae0ee2c29ef598c04d768" exitCode=0 Oct 03 13:33:12 crc kubenswrapper[4861]: I1003 13:33:12.511331 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"f407e73c-b330-4611-9a58-150ae709b761","Type":"ContainerDied","Data":"7496f1e5d06538ed7c9b3d481c343e6233d5c9450c7ae0ee2c29ef598c04d768"} Oct 03 13:33:12 crc kubenswrapper[4861]: I1003 13:33:12.512679 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8wtxc" event={"ID":"379a9149-fcc4-4958-81b1-a177b2afb908","Type":"ContainerStarted","Data":"68eabdc81d584b94f848290523d79bf281f9baf23efcf7241c732ea10b79cae3"} Oct 03 13:33:12 crc kubenswrapper[4861]: I1003 13:33:12.514420 4861 generic.go:334] "Generic (PLEG): container finished" podID="f4dd945c-0af6-46cc-92b5-91768370d81e" containerID="96a076dea26c844a4835c386aafef188870f5181b3980e10eb8d90768889c085" exitCode=0 Oct 03 13:33:12 crc kubenswrapper[4861]: I1003 13:33:12.514455 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-76l5p" event={"ID":"f4dd945c-0af6-46cc-92b5-91768370d81e","Type":"ContainerDied","Data":"96a076dea26c844a4835c386aafef188870f5181b3980e10eb8d90768889c085"} Oct 03 13:33:12 crc kubenswrapper[4861]: I1003 13:33:12.697056 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" 
path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Oct 03 13:33:12 crc kubenswrapper[4861]: I1003 13:33:12.859532 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-rkmmm" Oct 03 13:33:12 crc kubenswrapper[4861]: I1003 13:33:12.874293 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-rkmmm" Oct 03 13:33:13 crc kubenswrapper[4861]: I1003 13:33:13.261732 4861 patch_prober.go:28] interesting pod/router-default-5444994796-46jp8 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 03 13:33:13 crc kubenswrapper[4861]: [-]has-synced failed: reason withheld Oct 03 13:33:13 crc kubenswrapper[4861]: [+]process-running ok Oct 03 13:33:13 crc kubenswrapper[4861]: healthz check failed Oct 03 13:33:13 crc kubenswrapper[4861]: I1003 13:33:13.261830 4861 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-46jp8" podUID="dd1fcc3e-9164-4f36-8082-8458a06f6ce9" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 03 13:33:13 crc kubenswrapper[4861]: I1003 13:33:13.407104 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-6gqgm" Oct 03 13:33:13 crc kubenswrapper[4861]: I1003 13:33:13.539694 4861 generic.go:334] "Generic (PLEG): container finished" podID="379a9149-fcc4-4958-81b1-a177b2afb908" containerID="61e440adae1f2db4f38bd01d355612a899ffbc7d82d4cd2d3cb2c6d5e90ef4a6" exitCode=0 Oct 03 13:33:13 crc kubenswrapper[4861]: I1003 13:33:13.539755 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8wtxc" event={"ID":"379a9149-fcc4-4958-81b1-a177b2afb908","Type":"ContainerDied","Data":"61e440adae1f2db4f38bd01d355612a899ffbc7d82d4cd2d3cb2c6d5e90ef4a6"} Oct 03 13:33:13 crc kubenswrapper[4861]: I1003 13:33:13.543428 4861 generic.go:334] "Generic (PLEG): container finished" podID="c53f0f06-1b25-44b6-9797-1181e5c79ecf" containerID="7b96793850f176e5c52a45c1c4a69f8dc4f618f694df3255cdd14853d331e5b3" exitCode=0 Oct 03 13:33:13 crc kubenswrapper[4861]: I1003 13:33:13.544363 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-whv8v" event={"ID":"c53f0f06-1b25-44b6-9797-1181e5c79ecf","Type":"ContainerDied","Data":"7b96793850f176e5c52a45c1c4a69f8dc4f618f694df3255cdd14853d331e5b3"} Oct 03 13:33:13 crc kubenswrapper[4861]: I1003 13:33:13.544397 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-t6vlm" Oct 03 13:33:13 crc kubenswrapper[4861]: I1003 13:33:13.617260 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-t6vlm" podStartSLOduration=85.617242318 podStartE2EDuration="1m25.617242318s" podCreationTimestamp="2025-10-03 13:31:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:33:13.612824984 +0000 UTC m=+107.610810051" watchObservedRunningTime="2025-10-03 13:33:13.617242318 +0000 UTC m=+107.615227365" Oct 03 13:33:14 crc kubenswrapper[4861]: I1003 13:33:14.103961 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 03 13:33:14 crc kubenswrapper[4861]: I1003 13:33:14.261924 4861 patch_prober.go:28] interesting pod/router-default-5444994796-46jp8 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 03 13:33:14 crc kubenswrapper[4861]: [-]has-synced failed: reason withheld Oct 03 13:33:14 crc kubenswrapper[4861]: [+]process-running ok Oct 03 13:33:14 crc kubenswrapper[4861]: healthz check failed Oct 03 13:33:14 crc kubenswrapper[4861]: I1003 13:33:14.262333 4861 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-46jp8" podUID="dd1fcc3e-9164-4f36-8082-8458a06f6ce9" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 03 13:33:14 crc kubenswrapper[4861]: I1003 13:33:14.273594 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f407e73c-b330-4611-9a58-150ae709b761-kubelet-dir\") pod \"f407e73c-b330-4611-9a58-150ae709b761\" (UID: \"f407e73c-b330-4611-9a58-150ae709b761\") " Oct 03 13:33:14 crc kubenswrapper[4861]: I1003 13:33:14.273774 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f407e73c-b330-4611-9a58-150ae709b761-kube-api-access\") pod \"f407e73c-b330-4611-9a58-150ae709b761\" (UID: \"f407e73c-b330-4611-9a58-150ae709b761\") " Oct 03 13:33:14 crc kubenswrapper[4861]: I1003 13:33:14.274926 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f407e73c-b330-4611-9a58-150ae709b761-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "f407e73c-b330-4611-9a58-150ae709b761" (UID: "f407e73c-b330-4611-9a58-150ae709b761"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 03 13:33:14 crc kubenswrapper[4861]: I1003 13:33:14.313374 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f407e73c-b330-4611-9a58-150ae709b761-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "f407e73c-b330-4611-9a58-150ae709b761" (UID: "f407e73c-b330-4611-9a58-150ae709b761"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:33:14 crc kubenswrapper[4861]: I1003 13:33:14.375806 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f407e73c-b330-4611-9a58-150ae709b761-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 03 13:33:14 crc kubenswrapper[4861]: I1003 13:33:14.375836 4861 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f407e73c-b330-4611-9a58-150ae709b761-kubelet-dir\") on node \"crc\" DevicePath \"\"" Oct 03 13:33:14 crc kubenswrapper[4861]: I1003 13:33:14.582192 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 03 13:33:14 crc kubenswrapper[4861]: I1003 13:33:14.583446 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"f407e73c-b330-4611-9a58-150ae709b761","Type":"ContainerDied","Data":"0619a406009387fcfdecd78edcec03395320ca1ad0c264b3dc1fe8053263c18a"} Oct 03 13:33:14 crc kubenswrapper[4861]: I1003 13:33:14.583493 4861 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0619a406009387fcfdecd78edcec03395320ca1ad0c264b3dc1fe8053263c18a" Oct 03 13:33:15 crc kubenswrapper[4861]: I1003 13:33:15.261958 4861 patch_prober.go:28] interesting pod/router-default-5444994796-46jp8 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 03 13:33:15 crc kubenswrapper[4861]: [-]has-synced failed: reason withheld Oct 03 13:33:15 crc kubenswrapper[4861]: [+]process-running ok Oct 03 13:33:15 crc kubenswrapper[4861]: healthz check failed Oct 03 13:33:15 crc kubenswrapper[4861]: I1003 13:33:15.262016 4861 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-46jp8" podUID="dd1fcc3e-9164-4f36-8082-8458a06f6ce9" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 03 13:33:16 crc kubenswrapper[4861]: I1003 13:33:16.260860 4861 patch_prober.go:28] interesting pod/router-default-5444994796-46jp8 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 03 13:33:16 crc kubenswrapper[4861]: [-]has-synced failed: reason withheld Oct 03 13:33:16 crc kubenswrapper[4861]: [+]process-running ok Oct 03 13:33:16 crc kubenswrapper[4861]: healthz check failed Oct 03 13:33:16 crc kubenswrapper[4861]: I1003 13:33:16.261168 4861 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-46jp8" podUID="dd1fcc3e-9164-4f36-8082-8458a06f6ce9" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 03 13:33:17 crc kubenswrapper[4861]: I1003 13:33:17.260517 4861 patch_prober.go:28] interesting pod/router-default-5444994796-46jp8 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 03 13:33:17 crc kubenswrapper[4861]: [-]has-synced failed: reason withheld Oct 03 13:33:17 crc kubenswrapper[4861]: [+]process-running ok Oct 03 13:33:17 crc kubenswrapper[4861]: healthz check failed Oct 03 13:33:17 crc kubenswrapper[4861]: I1003 13:33:17.260591 4861 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-46jp8" podUID="dd1fcc3e-9164-4f36-8082-8458a06f6ce9" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 03 13:33:18 crc kubenswrapper[4861]: I1003 13:33:18.079421 4861 patch_prober.go:28] interesting pod/console-f9d7485db-v6tz9 container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.10:8443/health\": dial tcp 10.217.0.10:8443: connect: connection refused" start-of-body= Oct 03 13:33:18 crc kubenswrapper[4861]: I1003 13:33:18.079995 4861 prober.go:107] "Probe failed" probeType="Startup" 
pod="openshift-console/console-f9d7485db-v6tz9" podUID="35c87388-97eb-44ff-91d6-6e9b9cfaa6a1" containerName="console" probeResult="failure" output="Get \"https://10.217.0.10:8443/health\": dial tcp 10.217.0.10:8443: connect: connection refused" Oct 03 13:33:18 crc kubenswrapper[4861]: I1003 13:33:18.212705 4861 patch_prober.go:28] interesting pod/downloads-7954f5f757-rc9jz container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.30:8080/\": dial tcp 10.217.0.30:8080: connect: connection refused" start-of-body= Oct 03 13:33:18 crc kubenswrapper[4861]: I1003 13:33:18.212769 4861 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-rc9jz" podUID="b35950ee-9000-4269-a58d-f1d2c4563f05" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.30:8080/\": dial tcp 10.217.0.30:8080: connect: connection refused" Oct 03 13:33:18 crc kubenswrapper[4861]: I1003 13:33:18.213077 4861 patch_prober.go:28] interesting pod/downloads-7954f5f757-rc9jz container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.30:8080/\": dial tcp 10.217.0.30:8080: connect: connection refused" start-of-body= Oct 03 13:33:18 crc kubenswrapper[4861]: I1003 13:33:18.213125 4861 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-rc9jz" podUID="b35950ee-9000-4269-a58d-f1d2c4563f05" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.30:8080/\": dial tcp 10.217.0.30:8080: connect: connection refused" Oct 03 13:33:18 crc kubenswrapper[4861]: I1003 13:33:18.261579 4861 patch_prober.go:28] interesting pod/router-default-5444994796-46jp8 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 03 13:33:18 crc kubenswrapper[4861]: [-]has-synced failed: reason withheld Oct 03 13:33:18 crc kubenswrapper[4861]: [+]process-running ok Oct 03 13:33:18 crc kubenswrapper[4861]: healthz check failed Oct 03 13:33:18 crc kubenswrapper[4861]: I1003 13:33:18.261654 4861 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-46jp8" podUID="dd1fcc3e-9164-4f36-8082-8458a06f6ce9" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 03 13:33:19 crc kubenswrapper[4861]: I1003 13:33:19.266848 4861 patch_prober.go:28] interesting pod/router-default-5444994796-46jp8 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 03 13:33:19 crc kubenswrapper[4861]: [-]has-synced failed: reason withheld Oct 03 13:33:19 crc kubenswrapper[4861]: [+]process-running ok Oct 03 13:33:19 crc kubenswrapper[4861]: healthz check failed Oct 03 13:33:19 crc kubenswrapper[4861]: I1003 13:33:19.266926 4861 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-46jp8" podUID="dd1fcc3e-9164-4f36-8082-8458a06f6ce9" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 03 13:33:20 crc kubenswrapper[4861]: I1003 13:33:20.261157 4861 patch_prober.go:28] interesting pod/router-default-5444994796-46jp8 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 
500" start-of-body=[-]backend-http failed: reason withheld Oct 03 13:33:20 crc kubenswrapper[4861]: [-]has-synced failed: reason withheld Oct 03 13:33:20 crc kubenswrapper[4861]: [+]process-running ok Oct 03 13:33:20 crc kubenswrapper[4861]: healthz check failed Oct 03 13:33:20 crc kubenswrapper[4861]: I1003 13:33:20.261863 4861 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-46jp8" podUID="dd1fcc3e-9164-4f36-8082-8458a06f6ce9" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 03 13:33:21 crc kubenswrapper[4861]: I1003 13:33:21.260966 4861 patch_prober.go:28] interesting pod/router-default-5444994796-46jp8 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 03 13:33:21 crc kubenswrapper[4861]: [-]has-synced failed: reason withheld Oct 03 13:33:21 crc kubenswrapper[4861]: [+]process-running ok Oct 03 13:33:21 crc kubenswrapper[4861]: healthz check failed Oct 03 13:33:21 crc kubenswrapper[4861]: I1003 13:33:21.261049 4861 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-46jp8" podUID="dd1fcc3e-9164-4f36-8082-8458a06f6ce9" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 03 13:33:22 crc kubenswrapper[4861]: I1003 13:33:22.261561 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-46jp8" Oct 03 13:33:22 crc kubenswrapper[4861]: I1003 13:33:22.263597 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-46jp8" Oct 03 13:33:25 crc kubenswrapper[4861]: I1003 13:33:25.844693 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" Oct 03 13:33:28 crc kubenswrapper[4861]: I1003 13:33:28.082904 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-v6tz9" Oct 03 13:33:28 crc kubenswrapper[4861]: I1003 13:33:28.087170 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-v6tz9" Oct 03 13:33:28 crc kubenswrapper[4861]: I1003 13:33:28.213000 4861 patch_prober.go:28] interesting pod/downloads-7954f5f757-rc9jz container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.30:8080/\": dial tcp 10.217.0.30:8080: connect: connection refused" start-of-body= Oct 03 13:33:28 crc kubenswrapper[4861]: I1003 13:33:28.213059 4861 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-rc9jz" podUID="b35950ee-9000-4269-a58d-f1d2c4563f05" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.30:8080/\": dial tcp 10.217.0.30:8080: connect: connection refused" Oct 03 13:33:28 crc kubenswrapper[4861]: I1003 13:33:28.213095 4861 patch_prober.go:28] interesting pod/downloads-7954f5f757-rc9jz container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.30:8080/\": dial tcp 10.217.0.30:8080: connect: connection refused" start-of-body= Oct 03 13:33:28 crc kubenswrapper[4861]: I1003 13:33:28.213152 4861 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-rc9jz" 
podUID="b35950ee-9000-4269-a58d-f1d2c4563f05" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.30:8080/\": dial tcp 10.217.0.30:8080: connect: connection refused" Oct 03 13:33:28 crc kubenswrapper[4861]: I1003 13:33:28.213200 4861 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-console/downloads-7954f5f757-rc9jz" Oct 03 13:33:28 crc kubenswrapper[4861]: I1003 13:33:28.213653 4861 patch_prober.go:28] interesting pod/downloads-7954f5f757-rc9jz container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.30:8080/\": dial tcp 10.217.0.30:8080: connect: connection refused" start-of-body= Oct 03 13:33:28 crc kubenswrapper[4861]: I1003 13:33:28.213685 4861 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-rc9jz" podUID="b35950ee-9000-4269-a58d-f1d2c4563f05" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.30:8080/\": dial tcp 10.217.0.30:8080: connect: connection refused" Oct 03 13:33:28 crc kubenswrapper[4861]: I1003 13:33:28.213839 4861 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="download-server" containerStatusID={"Type":"cri-o","ID":"aaf8882246cdc1e1c340073f13a49fd3978f0b0764f89a376fbdee5b8edb1289"} pod="openshift-console/downloads-7954f5f757-rc9jz" containerMessage="Container download-server failed liveness probe, will be restarted" Oct 03 13:33:28 crc kubenswrapper[4861]: I1003 13:33:28.213953 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/downloads-7954f5f757-rc9jz" podUID="b35950ee-9000-4269-a58d-f1d2c4563f05" containerName="download-server" containerID="cri-o://aaf8882246cdc1e1c340073f13a49fd3978f0b0764f89a376fbdee5b8edb1289" gracePeriod=2 Oct 03 13:33:29 crc kubenswrapper[4861]: I1003 13:33:29.778071 4861 generic.go:334] "Generic (PLEG): container finished" podID="b35950ee-9000-4269-a58d-f1d2c4563f05" containerID="aaf8882246cdc1e1c340073f13a49fd3978f0b0764f89a376fbdee5b8edb1289" exitCode=0 Oct 03 13:33:29 crc kubenswrapper[4861]: I1003 13:33:29.778112 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-rc9jz" event={"ID":"b35950ee-9000-4269-a58d-f1d2c4563f05","Type":"ContainerDied","Data":"aaf8882246cdc1e1c340073f13a49fd3978f0b0764f89a376fbdee5b8edb1289"} Oct 03 13:33:31 crc kubenswrapper[4861]: I1003 13:33:31.399530 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-t6vlm" Oct 03 13:33:38 crc kubenswrapper[4861]: I1003 13:33:38.213377 4861 patch_prober.go:28] interesting pod/downloads-7954f5f757-rc9jz container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.30:8080/\": dial tcp 10.217.0.30:8080: connect: connection refused" start-of-body= Oct 03 13:33:38 crc kubenswrapper[4861]: I1003 13:33:38.213694 4861 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-rc9jz" podUID="b35950ee-9000-4269-a58d-f1d2c4563f05" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.30:8080/\": dial tcp 10.217.0.30:8080: connect: connection refused" Oct 03 13:33:38 crc kubenswrapper[4861]: I1003 13:33:38.349758 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-lzfdk" 
Oct 03 13:33:48 crc kubenswrapper[4861]: I1003 13:33:48.214015 4861 patch_prober.go:28] interesting pod/downloads-7954f5f757-rc9jz container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.30:8080/\": dial tcp 10.217.0.30:8080: connect: connection refused" start-of-body= Oct 03 13:33:48 crc kubenswrapper[4861]: I1003 13:33:48.214468 4861 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-rc9jz" podUID="b35950ee-9000-4269-a58d-f1d2c4563f05" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.30:8080/\": dial tcp 10.217.0.30:8080: connect: connection refused" Oct 03 13:33:55 crc kubenswrapper[4861]: I1003 13:33:55.614287 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 13:33:55 crc kubenswrapper[4861]: I1003 13:33:55.614936 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 13:33:55 crc kubenswrapper[4861]: I1003 13:33:55.616130 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Oct 03 13:33:55 crc kubenswrapper[4861]: I1003 13:33:55.616184 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Oct 03 13:33:55 crc kubenswrapper[4861]: I1003 13:33:55.616347 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 13:33:55 crc kubenswrapper[4861]: I1003 13:33:55.616429 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 13:33:55 crc kubenswrapper[4861]: I1003 13:33:55.617891 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Oct 03 13:33:55 crc kubenswrapper[4861]: I1003 13:33:55.628335 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Oct 03 13:33:55 crc kubenswrapper[4861]: I1003 13:33:55.633253 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: 
\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 13:33:55 crc kubenswrapper[4861]: I1003 13:33:55.635940 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 13:33:55 crc kubenswrapper[4861]: I1003 13:33:55.641331 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 13:33:55 crc kubenswrapper[4861]: I1003 13:33:55.643039 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 13:33:55 crc kubenswrapper[4861]: I1003 13:33:55.709020 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 13:33:55 crc kubenswrapper[4861]: I1003 13:33:55.718156 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 13:33:55 crc kubenswrapper[4861]: I1003 13:33:55.724414 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 13:33:57 crc kubenswrapper[4861]: E1003 13:33:57.240107 4861 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Oct 03 13:33:57 crc kubenswrapper[4861]: E1003 13:33:57.240293 4861 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-g684d,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-wmjx9_openshift-marketplace(bc19f2ef-da47-4f43-8281-0fa924546c1b): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 03 13:33:57 crc kubenswrapper[4861]: E1003 13:33:57.241456 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-wmjx9" podUID="bc19f2ef-da47-4f43-8281-0fa924546c1b" Oct 03 13:33:58 crc kubenswrapper[4861]: I1003 13:33:58.214639 4861 patch_prober.go:28] interesting pod/downloads-7954f5f757-rc9jz container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.30:8080/\": dial tcp 10.217.0.30:8080: connect: connection refused" start-of-body= Oct 03 13:33:58 crc kubenswrapper[4861]: I1003 13:33:58.214980 4861 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-rc9jz" podUID="b35950ee-9000-4269-a58d-f1d2c4563f05" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.30:8080/\": dial tcp 10.217.0.30:8080: connect: connection refused" Oct 03 13:33:58 crc kubenswrapper[4861]: E1003 13:33:58.495356 4861 pod_workers.go:1301] "Error syncing pod, skipping" 
err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-wmjx9" podUID="bc19f2ef-da47-4f43-8281-0fa924546c1b" Oct 03 13:33:58 crc kubenswrapper[4861]: E1003 13:33:58.612374 4861 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Oct 03 13:33:58 crc kubenswrapper[4861]: E1003 13:33:58.612807 4861 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-xtp27,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-wqcjq_openshift-marketplace(ee4ad52b-0124-4f26-8780-cd8c7e6657b5): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 03 13:33:58 crc kubenswrapper[4861]: E1003 13:33:58.614165 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-wqcjq" podUID="ee4ad52b-0124-4f26-8780-cd8c7e6657b5" Oct 03 13:33:58 crc kubenswrapper[4861]: E1003 13:33:58.680924 4861 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Oct 03 13:33:58 crc kubenswrapper[4861]: E1003 13:33:58.681088 4861 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-lg4jb,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-69rqg_openshift-marketplace(3656f402-8d9e-4401-83a5-8367adb5b0f0): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 03 13:33:58 crc kubenswrapper[4861]: E1003 13:33:58.682393 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-69rqg" podUID="3656f402-8d9e-4401-83a5-8367adb5b0f0" Oct 03 13:33:58 crc kubenswrapper[4861]: E1003 13:33:58.823280 4861 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Oct 03 13:33:58 crc kubenswrapper[4861]: E1003 13:33:58.823444 4861 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-9zbh4,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-mbpzd_openshift-marketplace(b06b44ef-af9a-4253-8f45-44d98adc49bf): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 03 13:33:58 crc kubenswrapper[4861]: E1003 13:33:58.824597 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-mbpzd" podUID="b06b44ef-af9a-4253-8f45-44d98adc49bf" Oct 03 13:34:00 crc kubenswrapper[4861]: I1003 13:34:00.145662 4861 patch_prober.go:28] interesting pod/machine-config-daemon-t9slw container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 13:34:00 crc kubenswrapper[4861]: I1003 13:34:00.145724 4861 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 13:34:03 crc kubenswrapper[4861]: E1003 13:34:03.444305 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-mbpzd" podUID="b06b44ef-af9a-4253-8f45-44d98adc49bf" Oct 03 13:34:03 crc kubenswrapper[4861]: E1003 13:34:03.444305 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-wqcjq" podUID="ee4ad52b-0124-4f26-8780-cd8c7e6657b5" Oct 03 13:34:03 crc kubenswrapper[4861]: E1003 
13:34:03.445612 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-69rqg" podUID="3656f402-8d9e-4401-83a5-8367adb5b0f0" Oct 03 13:34:08 crc kubenswrapper[4861]: I1003 13:34:08.212767 4861 patch_prober.go:28] interesting pod/downloads-7954f5f757-rc9jz container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.30:8080/\": dial tcp 10.217.0.30:8080: connect: connection refused" start-of-body= Oct 03 13:34:08 crc kubenswrapper[4861]: I1003 13:34:08.213147 4861 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-rc9jz" podUID="b35950ee-9000-4269-a58d-f1d2c4563f05" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.30:8080/\": dial tcp 10.217.0.30:8080: connect: connection refused" Oct 03 13:34:10 crc kubenswrapper[4861]: E1003 13:34:10.235734 4861 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Oct 03 13:34:10 crc kubenswrapper[4861]: E1003 13:34:10.236109 4861 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-9hmq5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-8wtxc_openshift-marketplace(379a9149-fcc4-4958-81b1-a177b2afb908): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 03 13:34:10 crc kubenswrapper[4861]: E1003 13:34:10.237295 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context 
canceled\"" pod="openshift-marketplace/redhat-operators-8wtxc" podUID="379a9149-fcc4-4958-81b1-a177b2afb908" Oct 03 13:34:10 crc kubenswrapper[4861]: E1003 13:34:10.324819 4861 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Oct 03 13:34:10 crc kubenswrapper[4861]: E1003 13:34:10.325168 4861 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-9crkp,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-whv8v_openshift-marketplace(c53f0f06-1b25-44b6-9797-1181e5c79ecf): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 03 13:34:10 crc kubenswrapper[4861]: E1003 13:34:10.326724 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-whv8v" podUID="c53f0f06-1b25-44b6-9797-1181e5c79ecf" Oct 03 13:34:10 crc kubenswrapper[4861]: E1003 13:34:10.427639 4861 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Oct 03 13:34:10 crc kubenswrapper[4861]: E1003 13:34:10.428035 4861 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-vw66w,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-nbf9j_openshift-marketplace(299f9a50-345e-4196-b954-1004eb2acd0c): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 03 13:34:10 crc kubenswrapper[4861]: E1003 13:34:10.429255 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-nbf9j" podUID="299f9a50-345e-4196-b954-1004eb2acd0c" Oct 03 13:34:10 crc kubenswrapper[4861]: E1003 13:34:10.463151 4861 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Oct 03 13:34:10 crc kubenswrapper[4861]: E1003 13:34:10.463537 4861 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-5vsrp,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-76l5p_openshift-marketplace(f4dd945c-0af6-46cc-92b5-91768370d81e): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 03 13:34:10 crc kubenswrapper[4861]: E1003 13:34:10.464740 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-76l5p" podUID="f4dd945c-0af6-46cc-92b5-91768370d81e" Oct 03 13:34:10 crc kubenswrapper[4861]: I1003 13:34:10.984805 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-rc9jz" event={"ID":"b35950ee-9000-4269-a58d-f1d2c4563f05","Type":"ContainerStarted","Data":"3a2f8bc6fb5121a80680da973944a9fabe9e3f0d46b0b031990da8cf92374b5a"} Oct 03 13:34:10 crc kubenswrapper[4861]: I1003 13:34:10.990971 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-rc9jz" Oct 03 13:34:10 crc kubenswrapper[4861]: I1003 13:34:10.991369 4861 patch_prober.go:28] interesting pod/downloads-7954f5f757-rc9jz container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.30:8080/\": dial tcp 10.217.0.30:8080: connect: connection refused" start-of-body= Oct 03 13:34:10 crc kubenswrapper[4861]: I1003 13:34:10.991482 4861 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-rc9jz" podUID="b35950ee-9000-4269-a58d-f1d2c4563f05" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.30:8080/\": dial tcp 10.217.0.30:8080: connect: connection refused" Oct 03 13:34:10 crc kubenswrapper[4861]: I1003 13:34:10.992076 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"23971a738d50e7710cc61ed6775c65d33a8d63a4b057523c90938571c2176383"} Oct 03 13:34:10 crc kubenswrapper[4861]: I1003 
13:34:10.993669 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"a5ce82a0162eb70d8bd2856e4f9c8534e68f9251ee34e0fc4a1d1581dd53ce28"} Oct 03 13:34:10 crc kubenswrapper[4861]: I1003 13:34:10.996940 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"2463adb43e345f0d8a8b927d6201c84e2a813106ed0bad9f5dbdb2fb2ed89f6c"} Oct 03 13:34:10 crc kubenswrapper[4861]: E1003 13:34:10.998013 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-8wtxc" podUID="379a9149-fcc4-4958-81b1-a177b2afb908" Oct 03 13:34:11 crc kubenswrapper[4861]: E1003 13:34:11.000596 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-whv8v" podUID="c53f0f06-1b25-44b6-9797-1181e5c79ecf" Oct 03 13:34:11 crc kubenswrapper[4861]: E1003 13:34:11.000636 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-nbf9j" podUID="299f9a50-345e-4196-b954-1004eb2acd0c" Oct 03 13:34:11 crc kubenswrapper[4861]: E1003 13:34:11.000755 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-76l5p" podUID="f4dd945c-0af6-46cc-92b5-91768370d81e" Oct 03 13:34:12 crc kubenswrapper[4861]: I1003 13:34:12.001665 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"1486f22154a31726e5ccf8170356b4bfac2c8bf4dd1950e57eb50a4aa8aeff3d"} Oct 03 13:34:12 crc kubenswrapper[4861]: I1003 13:34:12.004716 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"03e6cdc12156c49df7ab2dcbccce8ca3bcb8208b2703cc766fa108f42b5fdd6d"} Oct 03 13:34:12 crc kubenswrapper[4861]: I1003 13:34:12.004942 4861 patch_prober.go:28] interesting pod/downloads-7954f5f757-rc9jz container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.30:8080/\": dial tcp 10.217.0.30:8080: connect: connection refused" start-of-body= Oct 03 13:34:12 crc kubenswrapper[4861]: I1003 13:34:12.004979 4861 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-rc9jz" podUID="b35950ee-9000-4269-a58d-f1d2c4563f05" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.30:8080/\": dial tcp 
10.217.0.30:8080: connect: connection refused"
Oct 03 13:34:13 crc kubenswrapper[4861]: I1003 13:34:13.010768 4861 patch_prober.go:28] interesting pod/downloads-7954f5f757-rc9jz container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.30:8080/\": dial tcp 10.217.0.30:8080: connect: connection refused" start-of-body=
Oct 03 13:34:13 crc kubenswrapper[4861]: I1003 13:34:13.010811 4861 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-rc9jz" podUID="b35950ee-9000-4269-a58d-f1d2c4563f05" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.30:8080/\": dial tcp 10.217.0.30:8080: connect: connection refused"
Oct 03 13:34:13 crc kubenswrapper[4861]: I1003 13:34:13.011179 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"11586cd215530b5c671e373fefc33dd7b79c06b4206de2653abb0581953802c9"}
Oct 03 13:34:14 crc kubenswrapper[4861]: I1003 13:34:14.015862 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 03 13:34:18 crc kubenswrapper[4861]: I1003 13:34:18.212125 4861 patch_prober.go:28] interesting pod/downloads-7954f5f757-rc9jz container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.30:8080/\": dial tcp 10.217.0.30:8080: connect: connection refused" start-of-body=
Oct 03 13:34:18 crc kubenswrapper[4861]: I1003 13:34:18.212486 4861 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-rc9jz" podUID="b35950ee-9000-4269-a58d-f1d2c4563f05" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.30:8080/\": dial tcp 10.217.0.30:8080: connect: connection refused"
Oct 03 13:34:18 crc kubenswrapper[4861]: I1003 13:34:18.212192 4861 patch_prober.go:28] interesting pod/downloads-7954f5f757-rc9jz container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.30:8080/\": dial tcp 10.217.0.30:8080: connect: connection refused" start-of-body=
Oct 03 13:34:18 crc kubenswrapper[4861]: I1003 13:34:18.212592 4861 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-rc9jz" podUID="b35950ee-9000-4269-a58d-f1d2c4563f05" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.30:8080/\": dial tcp 10.217.0.30:8080: connect: connection refused"
Oct 03 13:34:28 crc kubenswrapper[4861]: I1003 13:34:28.217808 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-rc9jz"
Oct 03 13:34:30 crc kubenswrapper[4861]: I1003 13:34:30.145544 4861 patch_prober.go:28] interesting pod/machine-config-daemon-t9slw container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 03 13:34:30 crc kubenswrapper[4861]: I1003 13:34:30.145909 4861 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 03 13:34:45 crc kubenswrapper[4861]: I1003 13:34:45.798165 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 03 13:35:00 crc kubenswrapper[4861]: I1003 13:35:00.145766 4861 patch_prober.go:28] interesting pod/machine-config-daemon-t9slw container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 03 13:35:00 crc kubenswrapper[4861]: I1003 13:35:00.146387 4861 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 03 13:35:00 crc kubenswrapper[4861]: I1003 13:35:00.146504 4861 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-t9slw"
Oct 03 13:35:00 crc kubenswrapper[4861]: I1003 13:35:00.147330 4861 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"871a1c47b73846e3f28db33691e75b5ed73af7287e81dae4cf2134fd827614b4"} pod="openshift-machine-config-operator/machine-config-daemon-t9slw" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Oct 03 13:35:00 crc kubenswrapper[4861]: I1003 13:35:00.147416 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" containerName="machine-config-daemon" containerID="cri-o://871a1c47b73846e3f28db33691e75b5ed73af7287e81dae4cf2134fd827614b4" gracePeriod=600
Oct 03 13:35:00 crc kubenswrapper[4861]: I1003 13:35:00.263593 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-76l5p" event={"ID":"f4dd945c-0af6-46cc-92b5-91768370d81e","Type":"ContainerStarted","Data":"4a896c64da15dcafa0acc8844efca865796cf27b6448998b5fbaee1b4baad8eb"}
Oct 03 13:35:00 crc kubenswrapper[4861]: I1003 13:35:00.265513 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mbpzd" event={"ID":"b06b44ef-af9a-4253-8f45-44d98adc49bf","Type":"ContainerStarted","Data":"92d0ab920c3238b6f3c0d979aeb24476379146d839e20e3959570303a09f8f14"}
Oct 03 13:35:00 crc kubenswrapper[4861]: I1003 13:35:00.267183 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-69rqg" event={"ID":"3656f402-8d9e-4401-83a5-8367adb5b0f0","Type":"ContainerStarted","Data":"67e8d6f986d77d32d57db15a87968fd071cb3978158c3bd2940dc1283441ede9"}
Oct 03 13:35:00 crc kubenswrapper[4861]: I1003 13:35:00.268888 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-whv8v" event={"ID":"c53f0f06-1b25-44b6-9797-1181e5c79ecf","Type":"ContainerStarted","Data":"b2c39e4fb670f73c457ab92ff1dcf186aca025c680fb997662c1d651dfbda318"}
Oct 03 13:35:00 crc kubenswrapper[4861]: I1003 13:35:00.270679 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wqcjq" event={"ID":"ee4ad52b-0124-4f26-8780-cd8c7e6657b5","Type":"ContainerStarted","Data":"d82a103bd0d57a29e684bdc6abbfc48b539b723f43b953a1a8ca8d0b233f0754"}
Oct 03 13:35:00 crc kubenswrapper[4861]: I1003 13:35:00.273100 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wmjx9" event={"ID":"bc19f2ef-da47-4f43-8281-0fa924546c1b","Type":"ContainerStarted","Data":"aec4f88bf63b5044b2f9994a9ba06e5d65c4a38ceb1009c910cacd2734fb5cd7"}
Oct 03 13:35:00 crc kubenswrapper[4861]: I1003 13:35:00.274682 4861 generic.go:334] "Generic (PLEG): container finished" podID="299f9a50-345e-4196-b954-1004eb2acd0c" containerID="8221348b340304ec9be773075efd4c9c0b3959e9caa3e3d8cdfcf2c1bd81627f" exitCode=0
Oct 03 13:35:00 crc kubenswrapper[4861]: I1003 13:35:00.274758 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nbf9j" event={"ID":"299f9a50-345e-4196-b954-1004eb2acd0c","Type":"ContainerDied","Data":"8221348b340304ec9be773075efd4c9c0b3959e9caa3e3d8cdfcf2c1bd81627f"}
Oct 03 13:35:00 crc kubenswrapper[4861]: I1003 13:35:00.276303 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8wtxc" event={"ID":"379a9149-fcc4-4958-81b1-a177b2afb908","Type":"ContainerStarted","Data":"c60f6e518afacc79e89201bf33f1e18d5185ace9cfdd9962c25ca57bb92a1a83"}
Oct 03 13:35:01 crc kubenswrapper[4861]: I1003 13:35:01.284106 4861 generic.go:334] "Generic (PLEG): container finished" podID="bc19f2ef-da47-4f43-8281-0fa924546c1b" containerID="aec4f88bf63b5044b2f9994a9ba06e5d65c4a38ceb1009c910cacd2734fb5cd7" exitCode=0
Oct 03 13:35:01 crc kubenswrapper[4861]: I1003 13:35:01.284290 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wmjx9" event={"ID":"bc19f2ef-da47-4f43-8281-0fa924546c1b","Type":"ContainerDied","Data":"aec4f88bf63b5044b2f9994a9ba06e5d65c4a38ceb1009c910cacd2734fb5cd7"}
Oct 03 13:35:01 crc kubenswrapper[4861]: I1003 13:35:01.289536 4861 generic.go:334] "Generic (PLEG): container finished" podID="379a9149-fcc4-4958-81b1-a177b2afb908" containerID="c60f6e518afacc79e89201bf33f1e18d5185ace9cfdd9962c25ca57bb92a1a83" exitCode=0
Oct 03 13:35:01 crc kubenswrapper[4861]: I1003 13:35:01.289637 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8wtxc" event={"ID":"379a9149-fcc4-4958-81b1-a177b2afb908","Type":"ContainerDied","Data":"c60f6e518afacc79e89201bf33f1e18d5185ace9cfdd9962c25ca57bb92a1a83"}
Oct 03 13:35:01 crc kubenswrapper[4861]: I1003 13:35:01.295781 4861 generic.go:334] "Generic (PLEG): container finished" podID="f4dd945c-0af6-46cc-92b5-91768370d81e" containerID="4a896c64da15dcafa0acc8844efca865796cf27b6448998b5fbaee1b4baad8eb" exitCode=0
Oct 03 13:35:01 crc kubenswrapper[4861]: I1003 13:35:01.295833 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-76l5p" event={"ID":"f4dd945c-0af6-46cc-92b5-91768370d81e","Type":"ContainerDied","Data":"4a896c64da15dcafa0acc8844efca865796cf27b6448998b5fbaee1b4baad8eb"}
Oct 03 13:35:01 crc kubenswrapper[4861]: I1003 13:35:01.301659 4861 generic.go:334] "Generic (PLEG): container finished" podID="b06b44ef-af9a-4253-8f45-44d98adc49bf" containerID="92d0ab920c3238b6f3c0d979aeb24476379146d839e20e3959570303a09f8f14" exitCode=0
Oct 03 13:35:01 crc kubenswrapper[4861]: I1003 13:35:01.301749 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mbpzd" event={"ID":"b06b44ef-af9a-4253-8f45-44d98adc49bf","Type":"ContainerDied","Data":"92d0ab920c3238b6f3c0d979aeb24476379146d839e20e3959570303a09f8f14"}
Oct 03 13:35:01 crc kubenswrapper[4861]: I1003 13:35:01.308465 4861 generic.go:334] "Generic (PLEG): container finished" podID="3656f402-8d9e-4401-83a5-8367adb5b0f0" containerID="67e8d6f986d77d32d57db15a87968fd071cb3978158c3bd2940dc1283441ede9" exitCode=0
Oct 03 13:35:01 crc kubenswrapper[4861]: I1003 13:35:01.308523 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-69rqg" event={"ID":"3656f402-8d9e-4401-83a5-8367adb5b0f0","Type":"ContainerDied","Data":"67e8d6f986d77d32d57db15a87968fd071cb3978158c3bd2940dc1283441ede9"}
Oct 03 13:35:01 crc kubenswrapper[4861]: I1003 13:35:01.320747 4861 generic.go:334] "Generic (PLEG): container finished" podID="d8335d3f-417e-4114-b306-a3d8f6c31348" containerID="871a1c47b73846e3f28db33691e75b5ed73af7287e81dae4cf2134fd827614b4" exitCode=0
Oct 03 13:35:01 crc kubenswrapper[4861]: I1003 13:35:01.320839 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" event={"ID":"d8335d3f-417e-4114-b306-a3d8f6c31348","Type":"ContainerDied","Data":"871a1c47b73846e3f28db33691e75b5ed73af7287e81dae4cf2134fd827614b4"}
Oct 03 13:35:01 crc kubenswrapper[4861]: I1003 13:35:01.320874 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" event={"ID":"d8335d3f-417e-4114-b306-a3d8f6c31348","Type":"ContainerStarted","Data":"58a55ca6ca92c0434beafec071a82b4a4f4ceddc00d7c2e4599653cb06f6f65e"}
Oct 03 13:35:01 crc kubenswrapper[4861]: I1003 13:35:01.323165 4861 generic.go:334] "Generic (PLEG): container finished" podID="c53f0f06-1b25-44b6-9797-1181e5c79ecf" containerID="b2c39e4fb670f73c457ab92ff1dcf186aca025c680fb997662c1d651dfbda318" exitCode=0
Oct 03 13:35:01 crc kubenswrapper[4861]: I1003 13:35:01.323285 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-whv8v" event={"ID":"c53f0f06-1b25-44b6-9797-1181e5c79ecf","Type":"ContainerDied","Data":"b2c39e4fb670f73c457ab92ff1dcf186aca025c680fb997662c1d651dfbda318"}
Oct 03 13:35:01 crc kubenswrapper[4861]: I1003 13:35:01.332613 4861 generic.go:334] "Generic (PLEG): container finished" podID="ee4ad52b-0124-4f26-8780-cd8c7e6657b5" containerID="d82a103bd0d57a29e684bdc6abbfc48b539b723f43b953a1a8ca8d0b233f0754" exitCode=0
Oct 03 13:35:01 crc kubenswrapper[4861]: I1003 13:35:01.333280 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wqcjq" event={"ID":"ee4ad52b-0124-4f26-8780-cd8c7e6657b5","Type":"ContainerDied","Data":"d82a103bd0d57a29e684bdc6abbfc48b539b723f43b953a1a8ca8d0b233f0754"}
Oct 03 13:35:03 crc kubenswrapper[4861]: I1003 13:35:03.343580 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8wtxc" event={"ID":"379a9149-fcc4-4958-81b1-a177b2afb908","Type":"ContainerStarted","Data":"f338748e4ef345cfffbce587b31be369a3121b01d869f1cc08bc4f19d6dd1f31"}
Oct 03 13:35:03 crc kubenswrapper[4861]: I1003 13:35:03.359272 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-8wtxc" podStartSLOduration=3.339066414 podStartE2EDuration="1m52.359258612s" podCreationTimestamp="2025-10-03 13:33:11 +0000 UTC" firstStartedPulling="2025-10-03 13:33:13.541430066 +0000 UTC m=+107.539415113" lastFinishedPulling="2025-10-03 13:35:02.561622254 +0000 UTC m=+216.559607311" observedRunningTime="2025-10-03 13:35:03.357279643 +0000 UTC m=+217.355264710" watchObservedRunningTime="2025-10-03 13:35:03.359258612 +0000 UTC m=+217.357243659"
Oct 03 13:35:04 crc kubenswrapper[4861]: I1003 13:35:04.351010 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-76l5p" event={"ID":"f4dd945c-0af6-46cc-92b5-91768370d81e","Type":"ContainerStarted","Data":"fef2de089903a38ca8a8e85157274d1f974549e9b7a4cd651045f56640e090b5"}
Oct 03 13:35:04 crc kubenswrapper[4861]: I1003 13:35:04.369505 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-76l5p" podStartSLOduration=3.19255186 podStartE2EDuration="1m54.369468666s" podCreationTimestamp="2025-10-03 13:33:10 +0000 UTC" firstStartedPulling="2025-10-03 13:33:12.531392058 +0000 UTC m=+106.529377095" lastFinishedPulling="2025-10-03 13:35:03.708308854 +0000 UTC m=+217.706293901" observedRunningTime="2025-10-03 13:35:04.368758726 +0000 UTC m=+218.366743793" watchObservedRunningTime="2025-10-03 13:35:04.369468666 +0000 UTC m=+218.367453713"
Oct 03 13:35:06 crc kubenswrapper[4861]: I1003 13:35:06.365015 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wqcjq" event={"ID":"ee4ad52b-0124-4f26-8780-cd8c7e6657b5","Type":"ContainerStarted","Data":"9d569c45ed84ccc759f37f4d70c81b3f66fae4154a29541baf6fb0bdca616b32"}
Oct 03 13:35:08 crc kubenswrapper[4861]: I1003 13:35:08.402058 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-wqcjq" podStartSLOduration=7.354547509 podStartE2EDuration="2m0.402041615s" podCreationTimestamp="2025-10-03 13:33:08 +0000 UTC" firstStartedPulling="2025-10-03 13:33:11.497522663 +0000 UTC m=+105.495507710" lastFinishedPulling="2025-10-03 13:35:04.545016779 +0000 UTC m=+218.543001816" observedRunningTime="2025-10-03 13:35:08.399986005 +0000 UTC m=+222.397971052" watchObservedRunningTime="2025-10-03 13:35:08.402041615 +0000 UTC m=+222.400026662"
Oct 03 13:35:08 crc kubenswrapper[4861]: I1003 13:35:08.973215 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-wqcjq"
Oct 03 13:35:08 crc kubenswrapper[4861]: I1003 13:35:08.973290 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-wqcjq"
Oct 03 13:35:10 crc kubenswrapper[4861]: I1003 13:35:10.079950 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-wqcjq"
Oct 03 13:35:10 crc kubenswrapper[4861]: I1003 13:35:10.422721 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-wqcjq"
Oct 03 13:35:10 crc kubenswrapper[4861]: I1003 13:35:10.894683 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-76l5p"
Oct 03 13:35:10 crc kubenswrapper[4861]: I1003 13:35:10.894727 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-76l5p"
Oct 03 13:35:10 crc kubenswrapper[4861]: I1003 13:35:10.929386 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-76l5p"
Oct 03 13:35:11 crc kubenswrapper[4861]: I1003 13:35:11.433618 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-76l5p"
Oct 03 13:35:11 crc kubenswrapper[4861]: I1003 13:35:11.686302 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-8wtxc"
Oct 03 13:35:11 crc kubenswrapper[4861]: I1003 13:35:11.686998 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-8wtxc"
Oct 03 13:35:11 crc kubenswrapper[4861]: I1003 13:35:11.870696 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-8wtxc"
Oct 03 13:35:12 crc kubenswrapper[4861]: I1003 13:35:12.325036 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-wqcjq"]
Oct 03 13:35:12 crc kubenswrapper[4861]: I1003 13:35:12.399353 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-wqcjq" podUID="ee4ad52b-0124-4f26-8780-cd8c7e6657b5" containerName="registry-server" containerID="cri-o://9d569c45ed84ccc759f37f4d70c81b3f66fae4154a29541baf6fb0bdca616b32" gracePeriod=2
Oct 03 13:35:12 crc kubenswrapper[4861]: I1003 13:35:12.440211 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-8wtxc"
Oct 03 13:35:12 crc kubenswrapper[4861]: I1003 13:35:12.927522 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-76l5p"]
Oct 03 13:35:13 crc kubenswrapper[4861]: I1003 13:35:13.407151 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-76l5p" podUID="f4dd945c-0af6-46cc-92b5-91768370d81e" containerName="registry-server" containerID="cri-o://fef2de089903a38ca8a8e85157274d1f974549e9b7a4cd651045f56640e090b5" gracePeriod=2
Oct 03 13:35:14 crc kubenswrapper[4861]: I1003 13:35:14.413530 4861 generic.go:334] "Generic (PLEG): container finished" podID="ee4ad52b-0124-4f26-8780-cd8c7e6657b5" containerID="9d569c45ed84ccc759f37f4d70c81b3f66fae4154a29541baf6fb0bdca616b32" exitCode=0
Oct 03 13:35:14 crc kubenswrapper[4861]: I1003 13:35:14.413567 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wqcjq" event={"ID":"ee4ad52b-0124-4f26-8780-cd8c7e6657b5","Type":"ContainerDied","Data":"9d569c45ed84ccc759f37f4d70c81b3f66fae4154a29541baf6fb0bdca616b32"}
Oct 03 13:35:14 crc kubenswrapper[4861]: I1003 13:35:14.724257 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-8wtxc"]
Oct 03 13:35:15 crc kubenswrapper[4861]: I1003 13:35:15.418456 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-8wtxc" podUID="379a9149-fcc4-4958-81b1-a177b2afb908" containerName="registry-server" containerID="cri-o://f338748e4ef345cfffbce587b31be369a3121b01d869f1cc08bc4f19d6dd1f31" gracePeriod=2
Oct 03 13:35:16 crc kubenswrapper[4861]: I1003 13:35:16.425346 4861 generic.go:334] "Generic (PLEG): container finished" podID="f4dd945c-0af6-46cc-92b5-91768370d81e" containerID="fef2de089903a38ca8a8e85157274d1f974549e9b7a4cd651045f56640e090b5" exitCode=0
Oct 03 13:35:16 crc kubenswrapper[4861]: I1003 13:35:16.425385 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-76l5p" event={"ID":"f4dd945c-0af6-46cc-92b5-91768370d81e","Type":"ContainerDied","Data":"fef2de089903a38ca8a8e85157274d1f974549e9b7a4cd651045f56640e090b5"}
Oct 03 13:35:18 crc kubenswrapper[4861]: E1003 13:35:18.973513 4861 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 9d569c45ed84ccc759f37f4d70c81b3f66fae4154a29541baf6fb0bdca616b32 is running failed: container process not found" containerID="9d569c45ed84ccc759f37f4d70c81b3f66fae4154a29541baf6fb0bdca616b32" cmd=["grpc_health_probe","-addr=:50051"]
Oct 03 13:35:18 crc kubenswrapper[4861]: E1003 13:35:18.973945 4861 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 9d569c45ed84ccc759f37f4d70c81b3f66fae4154a29541baf6fb0bdca616b32 is running failed: container process not found" containerID="9d569c45ed84ccc759f37f4d70c81b3f66fae4154a29541baf6fb0bdca616b32" cmd=["grpc_health_probe","-addr=:50051"]
Oct 03 13:35:18 crc kubenswrapper[4861]: E1003 13:35:18.974219 4861 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 9d569c45ed84ccc759f37f4d70c81b3f66fae4154a29541baf6fb0bdca616b32 is running failed: container process not found" containerID="9d569c45ed84ccc759f37f4d70c81b3f66fae4154a29541baf6fb0bdca616b32" cmd=["grpc_health_probe","-addr=:50051"]
Oct 03 13:35:18 crc kubenswrapper[4861]: E1003 13:35:18.974258 4861 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 9d569c45ed84ccc759f37f4d70c81b3f66fae4154a29541baf6fb0bdca616b32 is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/certified-operators-wqcjq" podUID="ee4ad52b-0124-4f26-8780-cd8c7e6657b5" containerName="registry-server"
Oct 03 13:35:19 crc kubenswrapper[4861]: I1003 13:35:19.443367 4861 generic.go:334] "Generic (PLEG): container finished" podID="379a9149-fcc4-4958-81b1-a177b2afb908" containerID="f338748e4ef345cfffbce587b31be369a3121b01d869f1cc08bc4f19d6dd1f31" exitCode=0
Oct 03 13:35:19 crc kubenswrapper[4861]: I1003 13:35:19.443456 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8wtxc" event={"ID":"379a9149-fcc4-4958-81b1-a177b2afb908","Type":"ContainerDied","Data":"f338748e4ef345cfffbce587b31be369a3121b01d869f1cc08bc4f19d6dd1f31"}
Oct 03 13:35:20 crc kubenswrapper[4861]: E1003 13:35:20.895111 4861 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of fef2de089903a38ca8a8e85157274d1f974549e9b7a4cd651045f56640e090b5 is running failed: container process not found" containerID="fef2de089903a38ca8a8e85157274d1f974549e9b7a4cd651045f56640e090b5" cmd=["grpc_health_probe","-addr=:50051"]
Oct 03 13:35:20 crc kubenswrapper[4861]: E1003 13:35:20.896705 4861 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of fef2de089903a38ca8a8e85157274d1f974549e9b7a4cd651045f56640e090b5 is running failed: container process not found" containerID="fef2de089903a38ca8a8e85157274d1f974549e9b7a4cd651045f56640e090b5" cmd=["grpc_health_probe","-addr=:50051"]
Oct 03 13:35:20 crc kubenswrapper[4861]: E1003 13:35:20.897254 4861 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of fef2de089903a38ca8a8e85157274d1f974549e9b7a4cd651045f56640e090b5 is running failed: container process not found" containerID="fef2de089903a38ca8a8e85157274d1f974549e9b7a4cd651045f56640e090b5" cmd=["grpc_health_probe","-addr=:50051"]
Oct 03 13:35:20 crc kubenswrapper[4861]: E1003 13:35:20.897312 4861 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of fef2de089903a38ca8a8e85157274d1f974549e9b7a4cd651045f56640e090b5 is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/redhat-marketplace-76l5p" podUID="f4dd945c-0af6-46cc-92b5-91768370d81e" containerName="registry-server"
Oct 03 13:35:21 crc kubenswrapper[4861]: E1003 13:35:21.686488 4861 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of f338748e4ef345cfffbce587b31be369a3121b01d869f1cc08bc4f19d6dd1f31 is running failed: container process not found" containerID="f338748e4ef345cfffbce587b31be369a3121b01d869f1cc08bc4f19d6dd1f31" cmd=["grpc_health_probe","-addr=:50051"]
Oct 03 13:35:21 crc kubenswrapper[4861]: E1003 13:35:21.686903 4861 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of f338748e4ef345cfffbce587b31be369a3121b01d869f1cc08bc4f19d6dd1f31 is running failed: container process not found" containerID="f338748e4ef345cfffbce587b31be369a3121b01d869f1cc08bc4f19d6dd1f31" cmd=["grpc_health_probe","-addr=:50051"]
Oct 03 13:35:21 crc kubenswrapper[4861]: E1003 13:35:21.687152 4861 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of f338748e4ef345cfffbce587b31be369a3121b01d869f1cc08bc4f19d6dd1f31 is running failed: container process not found" containerID="f338748e4ef345cfffbce587b31be369a3121b01d869f1cc08bc4f19d6dd1f31" cmd=["grpc_health_probe","-addr=:50051"]
Oct 03 13:35:21 crc kubenswrapper[4861]: E1003 13:35:21.687177 4861 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of f338748e4ef345cfffbce587b31be369a3121b01d869f1cc08bc4f19d6dd1f31 is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/redhat-operators-8wtxc" podUID="379a9149-fcc4-4958-81b1-a177b2afb908" containerName="registry-server"
Oct 03 13:35:22 crc kubenswrapper[4861]: I1003 13:35:22.561567 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-wqcjq"
Oct 03 13:35:22 crc kubenswrapper[4861]: I1003 13:35:22.567035 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-76l5p"
Oct 03 13:35:22 crc kubenswrapper[4861]: I1003 13:35:22.723106 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ee4ad52b-0124-4f26-8780-cd8c7e6657b5-utilities\") pod \"ee4ad52b-0124-4f26-8780-cd8c7e6657b5\" (UID: \"ee4ad52b-0124-4f26-8780-cd8c7e6657b5\") "
Oct 03 13:35:22 crc kubenswrapper[4861]: I1003 13:35:22.723168 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5vsrp\" (UniqueName: \"kubernetes.io/projected/f4dd945c-0af6-46cc-92b5-91768370d81e-kube-api-access-5vsrp\") pod \"f4dd945c-0af6-46cc-92b5-91768370d81e\" (UID: \"f4dd945c-0af6-46cc-92b5-91768370d81e\") "
Oct 03 13:35:22 crc kubenswrapper[4861]: I1003 13:35:22.723278 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ee4ad52b-0124-4f26-8780-cd8c7e6657b5-catalog-content\") pod \"ee4ad52b-0124-4f26-8780-cd8c7e6657b5\" (UID: \"ee4ad52b-0124-4f26-8780-cd8c7e6657b5\") "
Oct 03 13:35:22 crc kubenswrapper[4861]: I1003 13:35:22.723327 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f4dd945c-0af6-46cc-92b5-91768370d81e-catalog-content\") pod \"f4dd945c-0af6-46cc-92b5-91768370d81e\" (UID: \"f4dd945c-0af6-46cc-92b5-91768370d81e\") "
Oct 03 13:35:22 crc kubenswrapper[4861]: I1003 13:35:22.723359 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f4dd945c-0af6-46cc-92b5-91768370d81e-utilities\") pod \"f4dd945c-0af6-46cc-92b5-91768370d81e\" (UID: \"f4dd945c-0af6-46cc-92b5-91768370d81e\") "
Oct 03 13:35:22 crc kubenswrapper[4861]: I1003 13:35:22.723384 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xtp27\" (UniqueName: \"kubernetes.io/projected/ee4ad52b-0124-4f26-8780-cd8c7e6657b5-kube-api-access-xtp27\") pod \"ee4ad52b-0124-4f26-8780-cd8c7e6657b5\" (UID: \"ee4ad52b-0124-4f26-8780-cd8c7e6657b5\") "
Oct 03 13:35:22 crc kubenswrapper[4861]: I1003 13:35:22.724176 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ee4ad52b-0124-4f26-8780-cd8c7e6657b5-utilities" (OuterVolumeSpecName: "utilities") pod "ee4ad52b-0124-4f26-8780-cd8c7e6657b5" (UID: "ee4ad52b-0124-4f26-8780-cd8c7e6657b5"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 03 13:35:22 crc kubenswrapper[4861]: I1003 13:35:22.724922 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f4dd945c-0af6-46cc-92b5-91768370d81e-utilities" (OuterVolumeSpecName: "utilities") pod "f4dd945c-0af6-46cc-92b5-91768370d81e" (UID: "f4dd945c-0af6-46cc-92b5-91768370d81e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 03 13:35:22 crc kubenswrapper[4861]: I1003 13:35:22.735954 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f4dd945c-0af6-46cc-92b5-91768370d81e-kube-api-access-5vsrp" (OuterVolumeSpecName: "kube-api-access-5vsrp") pod "f4dd945c-0af6-46cc-92b5-91768370d81e" (UID: "f4dd945c-0af6-46cc-92b5-91768370d81e"). InnerVolumeSpecName "kube-api-access-5vsrp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 13:35:22 crc kubenswrapper[4861]: I1003 13:35:22.740579 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ee4ad52b-0124-4f26-8780-cd8c7e6657b5-kube-api-access-xtp27" (OuterVolumeSpecName: "kube-api-access-xtp27") pod "ee4ad52b-0124-4f26-8780-cd8c7e6657b5" (UID: "ee4ad52b-0124-4f26-8780-cd8c7e6657b5"). InnerVolumeSpecName "kube-api-access-xtp27". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 13:35:22 crc kubenswrapper[4861]: I1003 13:35:22.775780 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f4dd945c-0af6-46cc-92b5-91768370d81e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f4dd945c-0af6-46cc-92b5-91768370d81e" (UID: "f4dd945c-0af6-46cc-92b5-91768370d81e"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 03 13:35:22 crc kubenswrapper[4861]: I1003 13:35:22.813988 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ee4ad52b-0124-4f26-8780-cd8c7e6657b5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ee4ad52b-0124-4f26-8780-cd8c7e6657b5" (UID: "ee4ad52b-0124-4f26-8780-cd8c7e6657b5"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 03 13:35:22 crc kubenswrapper[4861]: I1003 13:35:22.825113 4861 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ee4ad52b-0124-4f26-8780-cd8c7e6657b5-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 03 13:35:22 crc kubenswrapper[4861]: I1003 13:35:22.825371 4861 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f4dd945c-0af6-46cc-92b5-91768370d81e-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 03 13:35:22 crc kubenswrapper[4861]: I1003 13:35:22.825471 4861 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f4dd945c-0af6-46cc-92b5-91768370d81e-utilities\") on node \"crc\" DevicePath \"\""
Oct 03 13:35:22 crc kubenswrapper[4861]: I1003 13:35:22.825583 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xtp27\" (UniqueName: \"kubernetes.io/projected/ee4ad52b-0124-4f26-8780-cd8c7e6657b5-kube-api-access-xtp27\") on node \"crc\" DevicePath \"\""
Oct 03 13:35:22 crc kubenswrapper[4861]: I1003 13:35:22.825668 4861 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ee4ad52b-0124-4f26-8780-cd8c7e6657b5-utilities\") on node \"crc\" DevicePath \"\""
Oct 03 13:35:22 crc kubenswrapper[4861]: I1003 13:35:22.825747 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5vsrp\" (UniqueName: \"kubernetes.io/projected/f4dd945c-0af6-46cc-92b5-91768370d81e-kube-api-access-5vsrp\") on node \"crc\" DevicePath \"\""
Oct 03 13:35:23 crc kubenswrapper[4861]: I1003 13:35:23.466827 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-76l5p" event={"ID":"f4dd945c-0af6-46cc-92b5-91768370d81e","Type":"ContainerDied","Data":"a59c5e8acb72e38947f7d3bddc74a52ab04c80e1c8a22afbbf006331a1d92f8a"}
Oct 03 13:35:23 crc kubenswrapper[4861]: I1003 13:35:23.466904 4861 scope.go:117] "RemoveContainer" containerID="fef2de089903a38ca8a8e85157274d1f974549e9b7a4cd651045f56640e090b5"
Oct 03 13:35:23 crc kubenswrapper[4861]: I1003 13:35:23.467727 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-76l5p"
Oct 03 13:35:23 crc kubenswrapper[4861]: I1003 13:35:23.476957 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wqcjq" event={"ID":"ee4ad52b-0124-4f26-8780-cd8c7e6657b5","Type":"ContainerDied","Data":"b03e0ab13b39a3ea2b81fa828ee65cff8b45170bd81b51918e0f59f45b6f102b"}
Oct 03 13:35:23 crc kubenswrapper[4861]: I1003 13:35:23.477068 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-wqcjq"
Oct 03 13:35:23 crc kubenswrapper[4861]: I1003 13:35:23.523269 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-76l5p"]
Oct 03 13:35:23 crc kubenswrapper[4861]: I1003 13:35:23.526953 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-76l5p"]
Oct 03 13:35:23 crc kubenswrapper[4861]: I1003 13:35:23.533750 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-wqcjq"]
Oct 03 13:35:23 crc kubenswrapper[4861]: I1003 13:35:23.540669 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-wqcjq"]
Oct 03 13:35:24 crc kubenswrapper[4861]: I1003 13:35:24.882396 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ee4ad52b-0124-4f26-8780-cd8c7e6657b5" path="/var/lib/kubelet/pods/ee4ad52b-0124-4f26-8780-cd8c7e6657b5/volumes"
Oct 03 13:35:24 crc kubenswrapper[4861]: I1003 13:35:24.883406 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4dd945c-0af6-46cc-92b5-91768370d81e" path="/var/lib/kubelet/pods/f4dd945c-0af6-46cc-92b5-91768370d81e/volumes"
Oct 03 13:35:26 crc kubenswrapper[4861]: I1003 13:35:26.221787 4861 scope.go:117] "RemoveContainer" containerID="4a896c64da15dcafa0acc8844efca865796cf27b6448998b5fbaee1b4baad8eb"
Oct 03 13:35:26 crc kubenswrapper[4861]: I1003 13:35:26.233576 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-8wtxc"
Oct 03 13:35:26 crc kubenswrapper[4861]: I1003 13:35:26.246687 4861 scope.go:117] "RemoveContainer" containerID="96a076dea26c844a4835c386aafef188870f5181b3980e10eb8d90768889c085"
Oct 03 13:35:26 crc kubenswrapper[4861]: I1003 13:35:26.310127 4861 scope.go:117] "RemoveContainer" containerID="9d569c45ed84ccc759f37f4d70c81b3f66fae4154a29541baf6fb0bdca616b32"
Oct 03 13:35:26 crc kubenswrapper[4861]: I1003 13:35:26.340007 4861 scope.go:117] "RemoveContainer" containerID="d82a103bd0d57a29e684bdc6abbfc48b539b723f43b953a1a8ca8d0b233f0754"
Oct 03 13:35:26 crc kubenswrapper[4861]: I1003 13:35:26.370530 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9hmq5\" (UniqueName: \"kubernetes.io/projected/379a9149-fcc4-4958-81b1-a177b2afb908-kube-api-access-9hmq5\") pod \"379a9149-fcc4-4958-81b1-a177b2afb908\" (UID: \"379a9149-fcc4-4958-81b1-a177b2afb908\") "
Oct 03 13:35:26 crc kubenswrapper[4861]: I1003 13:35:26.370611 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/379a9149-fcc4-4958-81b1-a177b2afb908-catalog-content\") pod \"379a9149-fcc4-4958-81b1-a177b2afb908\" (UID: \"379a9149-fcc4-4958-81b1-a177b2afb908\") "
Oct 03 13:35:26 crc kubenswrapper[4861]: I1003 13:35:26.370649 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/379a9149-fcc4-4958-81b1-a177b2afb908-utilities\") pod \"379a9149-fcc4-4958-81b1-a177b2afb908\" (UID: \"379a9149-fcc4-4958-81b1-a177b2afb908\") "
Oct 03 13:35:26 crc kubenswrapper[4861]: I1003 13:35:26.372186 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/379a9149-fcc4-4958-81b1-a177b2afb908-utilities" (OuterVolumeSpecName: "utilities") pod "379a9149-fcc4-4958-81b1-a177b2afb908" (UID: "379a9149-fcc4-4958-81b1-a177b2afb908"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 03 13:35:26 crc kubenswrapper[4861]: I1003 13:35:26.378885 4861 scope.go:117] "RemoveContainer" containerID="c2eb08273bc6a2b5406aa0284dccb13aec9f44dd7d1714c29641afde639eca1e"
Oct 03 13:35:26 crc kubenswrapper[4861]: I1003 13:35:26.380262 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/379a9149-fcc4-4958-81b1-a177b2afb908-kube-api-access-9hmq5" (OuterVolumeSpecName: "kube-api-access-9hmq5") pod "379a9149-fcc4-4958-81b1-a177b2afb908" (UID: "379a9149-fcc4-4958-81b1-a177b2afb908"). InnerVolumeSpecName "kube-api-access-9hmq5". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 13:35:26 crc kubenswrapper[4861]: I1003 13:35:26.472522 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9hmq5\" (UniqueName: \"kubernetes.io/projected/379a9149-fcc4-4958-81b1-a177b2afb908-kube-api-access-9hmq5\") on node \"crc\" DevicePath \"\""
Oct 03 13:35:26 crc kubenswrapper[4861]: I1003 13:35:26.472558 4861 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/379a9149-fcc4-4958-81b1-a177b2afb908-utilities\") on node \"crc\" DevicePath \"\""
Oct 03 13:35:26 crc kubenswrapper[4861]: I1003 13:35:26.477800 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/379a9149-fcc4-4958-81b1-a177b2afb908-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "379a9149-fcc4-4958-81b1-a177b2afb908" (UID: "379a9149-fcc4-4958-81b1-a177b2afb908"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 03 13:35:26 crc kubenswrapper[4861]: I1003 13:35:26.494052 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8wtxc" event={"ID":"379a9149-fcc4-4958-81b1-a177b2afb908","Type":"ContainerDied","Data":"68eabdc81d584b94f848290523d79bf281f9baf23efcf7241c732ea10b79cae3"}
Oct 03 13:35:26 crc kubenswrapper[4861]: I1003 13:35:26.494101 4861 scope.go:117] "RemoveContainer" containerID="f338748e4ef345cfffbce587b31be369a3121b01d869f1cc08bc4f19d6dd1f31"
Oct 03 13:35:26 crc kubenswrapper[4861]: I1003 13:35:26.494206 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-8wtxc"
Oct 03 13:35:26 crc kubenswrapper[4861]: I1003 13:35:26.516887 4861 scope.go:117] "RemoveContainer" containerID="c60f6e518afacc79e89201bf33f1e18d5185ace9cfdd9962c25ca57bb92a1a83"
Oct 03 13:35:26 crc kubenswrapper[4861]: I1003 13:35:26.526205 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-8wtxc"]
Oct 03 13:35:26 crc kubenswrapper[4861]: I1003 13:35:26.530308 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-8wtxc"]
Oct 03 13:35:26 crc kubenswrapper[4861]: I1003 13:35:26.536978 4861 scope.go:117] "RemoveContainer" containerID="61e440adae1f2db4f38bd01d355612a899ffbc7d82d4cd2d3cb2c6d5e90ef4a6"
Oct 03 13:35:26 crc kubenswrapper[4861]: I1003 13:35:26.575117 4861 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/379a9149-fcc4-4958-81b1-a177b2afb908-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 03 13:35:26 crc kubenswrapper[4861]: I1003 13:35:26.698615 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="379a9149-fcc4-4958-81b1-a177b2afb908" path="/var/lib/kubelet/pods/379a9149-fcc4-4958-81b1-a177b2afb908/volumes"
Oct 03 13:35:27 crc kubenswrapper[4861]: I1003 13:35:27.505378 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mbpzd" event={"ID":"b06b44ef-af9a-4253-8f45-44d98adc49bf","Type":"ContainerStarted","Data":"ff0d57e5709c1029702bd86c3a6778bba73296aafc3ab85d107c625a4fb915a3"}
Oct 03 13:35:27 crc kubenswrapper[4861]: I1003 13:35:27.508755 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-69rqg" event={"ID":"3656f402-8d9e-4401-83a5-8367adb5b0f0","Type":"ContainerStarted","Data":"aaf8baf14e6c25969767f16ab9ba3f73313955d9c7b4b0e01606d15fec21ce86"}
Oct 03 13:35:27 crc kubenswrapper[4861]: I1003 13:35:27.510954 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-whv8v" event={"ID":"c53f0f06-1b25-44b6-9797-1181e5c79ecf","Type":"ContainerStarted","Data":"a3085a4d9914bb07813a7a39c408230fb22c90f88c3aa40fe8b1b88b7442a9c8"}
Oct 03 13:35:27 crc kubenswrapper[4861]: I1003 13:35:27.513308 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wmjx9" event={"ID":"bc19f2ef-da47-4f43-8281-0fa924546c1b","Type":"ContainerStarted","Data":"0a41b6bd3e64892d5083fb36ea65efcd7a22d97920fa2a776cbfd88da4a6cfe0"}
Oct 03 13:35:27 crc kubenswrapper[4861]: I1003 13:35:27.516244 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nbf9j" event={"ID":"299f9a50-345e-4196-b954-1004eb2acd0c","Type":"ContainerStarted","Data":"2bddb3e1552c89e4767501ded459ac2fbe67db80f90574ff080288999cda277e"}
Oct 03 13:35:27 crc kubenswrapper[4861]: I1003 13:35:27.571122 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-mbpzd" podStartSLOduration=3.7243037709999998 podStartE2EDuration="2m19.571103615s" podCreationTimestamp="2025-10-03 13:33:08 +0000 UTC" firstStartedPulling="2025-10-03 13:33:10.399394031 +0000 UTC m=+104.397379078" lastFinishedPulling="2025-10-03 13:35:26.246193875 +0000 UTC m=+240.244178922" observedRunningTime="2025-10-03 13:35:27.53956131 +0000 UTC m=+241.537546367" watchObservedRunningTime="2025-10-03 13:35:27.571103615 +0000 UTC m=+241.569088682"
Oct 03 13:35:27 crc kubenswrapper[4861]: I1003 13:35:27.594882 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-69rqg" podStartSLOduration=4.785191213 podStartE2EDuration="2m20.594864622s" podCreationTimestamp="2025-10-03 13:33:07 +0000 UTC" firstStartedPulling="2025-10-03 13:33:10.413475369 +0000 UTC m=+104.411460416" lastFinishedPulling="2025-10-03 13:35:26.223148778 +0000 UTC m=+240.221133825" observedRunningTime="2025-10-03 13:35:27.573960039 +0000 UTC m=+241.571945096" watchObservedRunningTime="2025-10-03 13:35:27.594864622 +0000 UTC m=+241.592849669"
Oct 03 13:35:27 crc kubenswrapper[4861]: I1003 13:35:27.594975 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-wmjx9" podStartSLOduration=3.8349920170000003 podStartE2EDuration="2m19.594971546s" podCreationTimestamp="2025-10-03 13:33:08 +0000 UTC" firstStartedPulling="2025-10-03 13:33:10.462038546 +0000 UTC m=+104.460023593" lastFinishedPulling="2025-10-03 13:35:26.222018075 +0000 UTC m=+240.220003122" observedRunningTime="2025-10-03 13:35:27.59172171 +0000 UTC m=+241.589706757" watchObservedRunningTime="2025-10-03 13:35:27.594971546 +0000 UTC m=+241.592956593"
Oct 03 13:35:27 crc kubenswrapper[4861]: I1003 13:35:27.621367 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-whv8v" podStartSLOduration=4.029534912 podStartE2EDuration="2m16.62135189s" podCreationTimestamp="2025-10-03 13:33:11 +0000 UTC" firstStartedPulling="2025-10-03 13:33:13.581190129 +0000 UTC m=+107.579175186" lastFinishedPulling="2025-10-03 13:35:26.173007117 +0000 UTC m=+240.170992164" observedRunningTime="2025-10-03 13:35:27.619198256 +0000 UTC m=+241.617183303" watchObservedRunningTime="2025-10-03 13:35:27.62135189 +0000 UTC m=+241.619336937"
Oct 03 13:35:27 crc kubenswrapper[4861]: I1003 13:35:27.650481 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-nbf9j" podStartSLOduration=7.459293276 podStartE2EDuration="2m17.650465444s" podCreationTimestamp="2025-10-03 13:33:10 +0000 UTC" firstStartedPulling="2025-10-03 13:33:11.473984953 +0000 UTC m=+105.471970000" lastFinishedPulling="2025-10-03 13:35:21.665157121 +0000 UTC m=+235.663142168" observedRunningTime="2025-10-03 13:35:27.647691623 +0000 UTC m=+241.645676680" watchObservedRunningTime="2025-10-03 13:35:27.650465444 +0000 UTC m=+241.648450491"
Oct 03 13:35:28 crc kubenswrapper[4861]: I1003 13:35:28.372424 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-69rqg"
Oct 03 13:35:28 crc kubenswrapper[4861]: I1003 13:35:28.372729 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-69rqg"
Oct 03 13:35:28 crc kubenswrapper[4861]: I1003 13:35:28.648274 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-mbpzd"
Oct 03 13:35:28 crc kubenswrapper[4861]: I1003 13:35:28.648340 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-mbpzd"
Oct 03 13:35:28 crc kubenswrapper[4861]: I1003 13:35:28.704442 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-mbpzd"
Oct 03 13:35:29 crc kubenswrapper[4861]: I1003 13:35:29.101877 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-wmjx9"
Oct 03 13:35:29 crc kubenswrapper[4861]: I1003 13:35:29.101920 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-wmjx9"
Oct 03 13:35:29 crc kubenswrapper[4861]: I1003 13:35:29.141157 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-wmjx9"
Oct 03 13:35:29 crc kubenswrapper[4861]: I1003 13:35:29.418542 4861 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-69rqg" podUID="3656f402-8d9e-4401-83a5-8367adb5b0f0" containerName="registry-server" probeResult="failure" output=<
Oct 03 13:35:29 crc kubenswrapper[4861]: timeout: failed to connect service ":50051" within 1s
Oct 03 13:35:29 crc kubenswrapper[4861]: >
Oct 03 13:35:30 crc kubenswrapper[4861]: I1003 13:35:30.475904 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-nbf9j"
Oct 03 13:35:30 crc kubenswrapper[4861]: I1003 13:35:30.476326 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-nbf9j"
Oct 03 13:35:30 crc kubenswrapper[4861]: I1003 13:35:30.517117 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-nbf9j"
Oct 03 13:35:31 crc kubenswrapper[4861]: I1003 13:35:31.488871 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-whv8v"
Oct 03 13:35:31 crc kubenswrapper[4861]: I1003 13:35:31.488940 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-whv8v"
Oct 03 13:35:32 crc kubenswrapper[4861]: I1003 13:35:32.526550 4861 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-whv8v" podUID="c53f0f06-1b25-44b6-9797-1181e5c79ecf" containerName="registry-server" probeResult="failure" output=<
Oct 03 13:35:32 crc kubenswrapper[4861]: timeout: failed to connect service ":50051" within 1s
Oct 03 13:35:32 crc kubenswrapper[4861]: >
Oct 03 13:35:38 crc kubenswrapper[4861]: I1003 13:35:38.417857 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-69rqg"
Oct 03 13:35:38 crc kubenswrapper[4861]: I1003 13:35:38.453785 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-69rqg"
Oct 03 13:35:38 crc kubenswrapper[4861]: I1003 13:35:38.687493 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-mbpzd"
Oct 03 13:35:39 crc kubenswrapper[4861]: I1003 13:35:39.150783 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-wmjx9"
Oct 03 13:35:39 crc kubenswrapper[4861]: I1003 13:35:39.338276 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-xv8s4"]
Oct 03 13:35:40 crc kubenswrapper[4861]: I1003 13:35:40.516717 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-nbf9j"
Oct 03 13:35:41 crc kubenswrapper[4861]: I1003 13:35:41.054052 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-wmjx9"]
Oct 03 13:35:41 crc kubenswrapper[4861]: I1003 13:35:41.054298 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-wmjx9" podUID="bc19f2ef-da47-4f43-8281-0fa924546c1b" containerName="registry-server" containerID="cri-o://0a41b6bd3e64892d5083fb36ea65efcd7a22d97920fa2a776cbfd88da4a6cfe0" gracePeriod=2
Oct 03 13:35:41 crc kubenswrapper[4861]: I1003 13:35:41.409940 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-wmjx9"
Oct 03 13:35:41 crc kubenswrapper[4861]: I1003 13:35:41.529247 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-whv8v"
Oct 03 13:35:41 crc kubenswrapper[4861]: I1003 13:35:41.571552 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-whv8v"
Oct 03 13:35:41 crc kubenswrapper[4861]: I1003 13:35:41.580935 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bc19f2ef-da47-4f43-8281-0fa924546c1b-utilities\") pod \"bc19f2ef-da47-4f43-8281-0fa924546c1b\" (UID: \"bc19f2ef-da47-4f43-8281-0fa924546c1b\") "
Oct 03 13:35:41 crc kubenswrapper[4861]: I1003 13:35:41.581006 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g684d\" (UniqueName: \"kubernetes.io/projected/bc19f2ef-da47-4f43-8281-0fa924546c1b-kube-api-access-g684d\") pod \"bc19f2ef-da47-4f43-8281-0fa924546c1b\" (UID: \"bc19f2ef-da47-4f43-8281-0fa924546c1b\") "
Oct 03 13:35:41 crc kubenswrapper[4861]: I1003 13:35:41.581088 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bc19f2ef-da47-4f43-8281-0fa924546c1b-catalog-content\") pod \"bc19f2ef-da47-4f43-8281-0fa924546c1b\" (UID: \"bc19f2ef-da47-4f43-8281-0fa924546c1b\") "
Oct 03 13:35:41 crc kubenswrapper[4861]: I1003 13:35:41.581930 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc19f2ef-da47-4f43-8281-0fa924546c1b-utilities" (OuterVolumeSpecName: "utilities") pod "bc19f2ef-da47-4f43-8281-0fa924546c1b" (UID: "bc19f2ef-da47-4f43-8281-0fa924546c1b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 03 13:35:41 crc kubenswrapper[4861]: I1003 13:35:41.582046 4861 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bc19f2ef-da47-4f43-8281-0fa924546c1b-utilities\") on node \"crc\" DevicePath \"\""
Oct 03 13:35:41 crc kubenswrapper[4861]: I1003 13:35:41.617538 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc19f2ef-da47-4f43-8281-0fa924546c1b-kube-api-access-g684d" (OuterVolumeSpecName: "kube-api-access-g684d") pod "bc19f2ef-da47-4f43-8281-0fa924546c1b" (UID: "bc19f2ef-da47-4f43-8281-0fa924546c1b"). InnerVolumeSpecName "kube-api-access-g684d". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 13:35:41 crc kubenswrapper[4861]: I1003 13:35:41.622592 4861 generic.go:334] "Generic (PLEG): container finished" podID="bc19f2ef-da47-4f43-8281-0fa924546c1b" containerID="0a41b6bd3e64892d5083fb36ea65efcd7a22d97920fa2a776cbfd88da4a6cfe0" exitCode=0
Oct 03 13:35:41 crc kubenswrapper[4861]: I1003 13:35:41.623423 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-wmjx9"
Oct 03 13:35:41 crc kubenswrapper[4861]: I1003 13:35:41.623417 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wmjx9" event={"ID":"bc19f2ef-da47-4f43-8281-0fa924546c1b","Type":"ContainerDied","Data":"0a41b6bd3e64892d5083fb36ea65efcd7a22d97920fa2a776cbfd88da4a6cfe0"}
Oct 03 13:35:41 crc kubenswrapper[4861]: I1003 13:35:41.623626 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wmjx9" event={"ID":"bc19f2ef-da47-4f43-8281-0fa924546c1b","Type":"ContainerDied","Data":"2616696ce8e1df3903a77d889ad54ca4f560ddce9a653d17670846921979a7dc"}
Oct 03 13:35:41 crc kubenswrapper[4861]: I1003 13:35:41.623659 4861 scope.go:117] "RemoveContainer" containerID="0a41b6bd3e64892d5083fb36ea65efcd7a22d97920fa2a776cbfd88da4a6cfe0"
Oct 03 13:35:41 crc kubenswrapper[4861]: I1003 13:35:41.653288 4861 scope.go:117] "RemoveContainer" containerID="aec4f88bf63b5044b2f9994a9ba06e5d65c4a38ceb1009c910cacd2734fb5cd7"
Oct 03 13:35:41 crc kubenswrapper[4861]: I1003 13:35:41.677583 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc19f2ef-da47-4f43-8281-0fa924546c1b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "bc19f2ef-da47-4f43-8281-0fa924546c1b" (UID: "bc19f2ef-da47-4f43-8281-0fa924546c1b"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 03 13:35:41 crc kubenswrapper[4861]: I1003 13:35:41.679924 4861 scope.go:117] "RemoveContainer" containerID="70f366fb927b16243a4ede84730e9674ef1403764f76e5008908734cdcb7d28a"
Oct 03 13:35:41 crc kubenswrapper[4861]: I1003 13:35:41.682710 4861 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bc19f2ef-da47-4f43-8281-0fa924546c1b-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 03 13:35:41 crc kubenswrapper[4861]: I1003 13:35:41.682735 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g684d\" (UniqueName: \"kubernetes.io/projected/bc19f2ef-da47-4f43-8281-0fa924546c1b-kube-api-access-g684d\") on node \"crc\" DevicePath \"\""
Oct 03 13:35:41 crc kubenswrapper[4861]: I1003 13:35:41.695958 4861 scope.go:117] "RemoveContainer" containerID="0a41b6bd3e64892d5083fb36ea65efcd7a22d97920fa2a776cbfd88da4a6cfe0"
Oct 03 13:35:41 crc kubenswrapper[4861]: E1003 13:35:41.696294 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0a41b6bd3e64892d5083fb36ea65efcd7a22d97920fa2a776cbfd88da4a6cfe0\": container with ID starting with 0a41b6bd3e64892d5083fb36ea65efcd7a22d97920fa2a776cbfd88da4a6cfe0 not found: ID does not exist" containerID="0a41b6bd3e64892d5083fb36ea65efcd7a22d97920fa2a776cbfd88da4a6cfe0"
Oct 03 13:35:41 crc kubenswrapper[4861]: I1003 13:35:41.696335 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0a41b6bd3e64892d5083fb36ea65efcd7a22d97920fa2a776cbfd88da4a6cfe0"} err="failed to get container status \"0a41b6bd3e64892d5083fb36ea65efcd7a22d97920fa2a776cbfd88da4a6cfe0\": rpc error: code = NotFound desc = could not find container \"0a41b6bd3e64892d5083fb36ea65efcd7a22d97920fa2a776cbfd88da4a6cfe0\": container with ID starting with 0a41b6bd3e64892d5083fb36ea65efcd7a22d97920fa2a776cbfd88da4a6cfe0 not found: ID does not exist"
Oct 03 13:35:41 crc kubenswrapper[4861]: I1003 13:35:41.696361 4861 scope.go:117] "RemoveContainer" containerID="aec4f88bf63b5044b2f9994a9ba06e5d65c4a38ceb1009c910cacd2734fb5cd7"
Oct 03 13:35:41 crc kubenswrapper[4861]: E1003 13:35:41.696608 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aec4f88bf63b5044b2f9994a9ba06e5d65c4a38ceb1009c910cacd2734fb5cd7\": container with ID starting with aec4f88bf63b5044b2f9994a9ba06e5d65c4a38ceb1009c910cacd2734fb5cd7 not found: ID does not exist" containerID="aec4f88bf63b5044b2f9994a9ba06e5d65c4a38ceb1009c910cacd2734fb5cd7"
Oct 03 13:35:41 crc kubenswrapper[4861]: I1003 13:35:41.696635 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aec4f88bf63b5044b2f9994a9ba06e5d65c4a38ceb1009c910cacd2734fb5cd7"} err="failed to get container status \"aec4f88bf63b5044b2f9994a9ba06e5d65c4a38ceb1009c910cacd2734fb5cd7\": rpc error: code = NotFound desc = could not find container \"aec4f88bf63b5044b2f9994a9ba06e5d65c4a38ceb1009c910cacd2734fb5cd7\": container with ID starting with aec4f88bf63b5044b2f9994a9ba06e5d65c4a38ceb1009c910cacd2734fb5cd7 not found: ID does not exist"
Oct 03 13:35:41 crc kubenswrapper[4861]: I1003 13:35:41.696653 4861 scope.go:117] "RemoveContainer" containerID="70f366fb927b16243a4ede84730e9674ef1403764f76e5008908734cdcb7d28a"
Oct 03 13:35:41 crc kubenswrapper[4861]: E1003 13:35:41.696921 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"70f366fb927b16243a4ede84730e9674ef1403764f76e5008908734cdcb7d28a\": container with ID starting with 70f366fb927b16243a4ede84730e9674ef1403764f76e5008908734cdcb7d28a not found: ID does not exist" containerID="70f366fb927b16243a4ede84730e9674ef1403764f76e5008908734cdcb7d28a"
Oct 03 13:35:41 crc kubenswrapper[4861]: I1003 13:35:41.696951 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"70f366fb927b16243a4ede84730e9674ef1403764f76e5008908734cdcb7d28a"} err="failed to get container status \"70f366fb927b16243a4ede84730e9674ef1403764f76e5008908734cdcb7d28a\": rpc error: code = NotFound desc = could not find container \"70f366fb927b16243a4ede84730e9674ef1403764f76e5008908734cdcb7d28a\": container with ID starting with 70f366fb927b16243a4ede84730e9674ef1403764f76e5008908734cdcb7d28a not found: ID does not exist"
Oct 03 13:35:41 crc kubenswrapper[4861]: I1003 13:35:41.954504 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-wmjx9"]
Oct 03 13:35:41 crc kubenswrapper[4861]: I1003 13:35:41.957189 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-wmjx9"]
Oct 03 13:35:42 crc kubenswrapper[4861]: I1003 13:35:42.689288 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc19f2ef-da47-4f43-8281-0fa924546c1b" path="/var/lib/kubelet/pods/bc19f2ef-da47-4f43-8281-0fa924546c1b/volumes"
Oct 03 13:35:42 crc kubenswrapper[4861]: I1003 13:35:42.897707 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-mbpzd"]
Oct 03 13:35:42 crc kubenswrapper[4861]: I1003 13:35:42.897919 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-mbpzd" podUID="b06b44ef-af9a-4253-8f45-44d98adc49bf" containerName="registry-server" containerID="cri-o://ff0d57e5709c1029702bd86c3a6778bba73296aafc3ab85d107c625a4fb915a3" gracePeriod=30
Oct 03 13:35:42 crc kubenswrapper[4861]: I1003 13:35:42.909744 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-69rqg"]
Oct 03 13:35:42 crc kubenswrapper[4861]: I1003 13:35:42.909964 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-69rqg" podUID="3656f402-8d9e-4401-83a5-8367adb5b0f0" containerName="registry-server" containerID="cri-o://aaf8baf14e6c25969767f16ab9ba3f73313955d9c7b4b0e01606d15fec21ce86" gracePeriod=30
Oct 03 13:35:42 crc kubenswrapper[4861]: I1003 13:35:42.918088 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-bj6mh"]
Oct 03 13:35:42 crc kubenswrapper[4861]: I1003 13:35:42.918299 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-bj6mh" podUID="f23bb6a0-acb7-4ada-96a2-73a978d75125" containerName="marketplace-operator" containerID="cri-o://930ee4d553236fa442f10c4236d0fd2ee0ac0a7bb4c6c033a3b3c47d11a58589" gracePeriod=30
Oct 03 13:35:42 crc kubenswrapper[4861]: I1003 13:35:42.928354 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-nbf9j"]
Oct 03 13:35:42 crc kubenswrapper[4861]: I1003 13:35:42.928585 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-nbf9j" podUID="299f9a50-345e-4196-b954-1004eb2acd0c" containerName="registry-server" containerID="cri-o://2bddb3e1552c89e4767501ded459ac2fbe67db80f90574ff080288999cda277e" gracePeriod=30
Oct 03 13:35:42 crc kubenswrapper[4861]: I1003 13:35:42.946511 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-p5qrs"]
Oct 03 13:35:42 crc kubenswrapper[4861]: E1003 13:35:42.946723 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4dd945c-0af6-46cc-92b5-91768370d81e" containerName="extract-utilities"
Oct 03 13:35:42 crc kubenswrapper[4861]: I1003 13:35:42.946738 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4dd945c-0af6-46cc-92b5-91768370d81e" containerName="extract-utilities"
Oct 03 13:35:42 crc kubenswrapper[4861]: E1003 13:35:42.946750 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee4ad52b-0124-4f26-8780-cd8c7e6657b5" containerName="extract-content"
Oct 03 13:35:42 crc kubenswrapper[4861]: I1003 13:35:42.946757 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee4ad52b-0124-4f26-8780-cd8c7e6657b5" containerName="extract-content"
Oct 03 13:35:42 crc kubenswrapper[4861]: E1003 13:35:42.946767 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4dd945c-0af6-46cc-92b5-91768370d81e" containerName="extract-content"
Oct 03 13:35:42 crc kubenswrapper[4861]: I1003 13:35:42.946778 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4dd945c-0af6-46cc-92b5-91768370d81e" containerName="extract-content"
Oct 03 13:35:42 crc kubenswrapper[4861]: E1003 13:35:42.946785 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4dd945c-0af6-46cc-92b5-91768370d81e" containerName="registry-server"
Oct 03 13:35:42 crc kubenswrapper[4861]: I1003 13:35:42.946793 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4dd945c-0af6-46cc-92b5-91768370d81e" containerName="registry-server"
Oct 03 13:35:42 crc kubenswrapper[4861]: E1003 13:35:42.946801 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="379a9149-fcc4-4958-81b1-a177b2afb908" containerName="extract-utilities"
Oct 03 13:35:42 crc kubenswrapper[4861]: I1003 13:35:42.946808 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="379a9149-fcc4-4958-81b1-a177b2afb908" containerName="extract-utilities"
Oct 03 13:35:42 crc kubenswrapper[4861]: E1003 13:35:42.946817 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee4ad52b-0124-4f26-8780-cd8c7e6657b5" containerName="extract-utilities"
Oct 03 13:35:42 crc kubenswrapper[4861]: I1003 13:35:42.946824 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee4ad52b-0124-4f26-8780-cd8c7e6657b5" containerName="extract-utilities"
Oct 03 13:35:42 crc kubenswrapper[4861]: E1003 13:35:42.946834 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee4ad52b-0124-4f26-8780-cd8c7e6657b5" containerName="registry-server"
Oct 03 13:35:42 crc kubenswrapper[4861]: I1003 13:35:42.946841 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee4ad52b-0124-4f26-8780-cd8c7e6657b5" containerName="registry-server"
Oct 03 13:35:42 crc kubenswrapper[4861]: E1003 13:35:42.946852 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="379a9149-fcc4-4958-81b1-a177b2afb908" containerName="registry-server"
Oct 03 13:35:42 crc kubenswrapper[4861]: I1003 13:35:42.946859 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="379a9149-fcc4-4958-81b1-a177b2afb908" containerName="registry-server"
Oct 03 13:35:42 crc kubenswrapper[4861]: E1003 13:35:42.946870 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bc19f2ef-da47-4f43-8281-0fa924546c1b" containerName="registry-server"
Oct 03 13:35:42 crc kubenswrapper[4861]: I1003 13:35:42.946878 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="bc19f2ef-da47-4f43-8281-0fa924546c1b" containerName="registry-server"
Oct 03 13:35:42 crc kubenswrapper[4861]: E1003 13:35:42.946888 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f407e73c-b330-4611-9a58-150ae709b761" containerName="pruner"
Oct 03 13:35:42 crc kubenswrapper[4861]: I1003 13:35:42.946895 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="f407e73c-b330-4611-9a58-150ae709b761" containerName="pruner"
Oct 03 13:35:42 crc kubenswrapper[4861]: E1003 13:35:42.946905 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0c3c7089-d98e-4504-91c2-27851ed21d16" containerName="collect-profiles"
Oct 03 13:35:42 crc kubenswrapper[4861]: I1003 13:35:42.946912 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="0c3c7089-d98e-4504-91c2-27851ed21d16" containerName="collect-profiles"
Oct 03 13:35:42 crc kubenswrapper[4861]: E1003 13:35:42.946920 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="379a9149-fcc4-4958-81b1-a177b2afb908" containerName="extract-content"
Oct 03 13:35:42 crc kubenswrapper[4861]: I1003 13:35:42.946927 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="379a9149-fcc4-4958-81b1-a177b2afb908" containerName="extract-content"
Oct 03 13:35:42 crc kubenswrapper[4861]: E1003 13:35:42.946939 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bc19f2ef-da47-4f43-8281-0fa924546c1b" containerName="extract-content"
Oct 03 13:35:42 crc kubenswrapper[4861]: I1003 13:35:42.946947 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="bc19f2ef-da47-4f43-8281-0fa924546c1b" containerName="extract-content"
Oct 03 13:35:42 crc kubenswrapper[4861]: E1003 13:35:42.946960 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bc19f2ef-da47-4f43-8281-0fa924546c1b" containerName="extract-utilities"
Oct 03 13:35:42 crc kubenswrapper[4861]: I1003 13:35:42.946967 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="bc19f2ef-da47-4f43-8281-0fa924546c1b" containerName="extract-utilities"
Oct 03 13:35:42 crc kubenswrapper[4861]: I1003 13:35:42.947077 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="379a9149-fcc4-4958-81b1-a177b2afb908" containerName="registry-server"
Oct 03 13:35:42 crc kubenswrapper[4861]: I1003 13:35:42.947092 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="0c3c7089-d98e-4504-91c2-27851ed21d16" containerName="collect-profiles"
Oct 03 13:35:42 crc kubenswrapper[4861]: I1003 13:35:42.947104 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="bc19f2ef-da47-4f43-8281-0fa924546c1b" containerName="registry-server"
Oct 03 13:35:42 crc kubenswrapper[4861]: I1003 13:35:42.947114 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="ee4ad52b-0124-4f26-8780-cd8c7e6657b5" containerName="registry-server"
Oct 03 13:35:42 crc kubenswrapper[4861]: I1003 13:35:42.947124 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4dd945c-0af6-46cc-92b5-91768370d81e" containerName="registry-server"
Oct 03 13:35:42 crc kubenswrapper[4861]: I1003 13:35:42.947134 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="f407e73c-b330-4611-9a58-150ae709b761" containerName="pruner"
Oct 03 13:35:42 crc kubenswrapper[4861]: I1003 13:35:42.947601 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-p5qrs"
Oct 03 13:35:42 crc kubenswrapper[4861]: I1003 13:35:42.956020 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-whv8v"]
Oct 03 13:35:42 crc kubenswrapper[4861]: I1003 13:35:42.956326 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-whv8v" podUID="c53f0f06-1b25-44b6-9797-1181e5c79ecf" containerName="registry-server" containerID="cri-o://a3085a4d9914bb07813a7a39c408230fb22c90f88c3aa40fe8b1b88b7442a9c8" gracePeriod=30
Oct 03 13:35:42 crc kubenswrapper[4861]: I1003 13:35:42.968479 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-p5qrs"]
Oct 03 13:35:43 crc kubenswrapper[4861]: I1003 13:35:43.099881 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/6d1c8721-e495-45da-8947-09c44940673d-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-p5qrs\" (UID: \"6d1c8721-e495-45da-8947-09c44940673d\") " pod="openshift-marketplace/marketplace-operator-79b997595-p5qrs"
Oct 03 13:35:43 crc kubenswrapper[4861]: I1003 13:35:43.100045 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/6d1c8721-e495-45da-8947-09c44940673d-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-p5qrs\" (UID: \"6d1c8721-e495-45da-8947-09c44940673d\") " pod="openshift-marketplace/marketplace-operator-79b997595-p5qrs"
Oct 03 13:35:43 crc kubenswrapper[4861]: I1003 13:35:43.100157 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fqtq9\" (UniqueName: \"kubernetes.io/projected/6d1c8721-e495-45da-8947-09c44940673d-kube-api-access-fqtq9\") pod \"marketplace-operator-79b997595-p5qrs\" (UID: \"6d1c8721-e495-45da-8947-09c44940673d\") " pod="openshift-marketplace/marketplace-operator-79b997595-p5qrs"
Oct 03 13:35:43 crc kubenswrapper[4861]: I1003 13:35:43.201795 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/6d1c8721-e495-45da-8947-09c44940673d-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-p5qrs\" (UID: \"6d1c8721-e495-45da-8947-09c44940673d\") " pod="openshift-marketplace/marketplace-operator-79b997595-p5qrs"
Oct 03 13:35:43 crc kubenswrapper[4861]: I1003 13:35:43.201869 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fqtq9\" (UniqueName: \"kubernetes.io/projected/6d1c8721-e495-45da-8947-09c44940673d-kube-api-access-fqtq9\") pod \"marketplace-operator-79b997595-p5qrs\" (UID: \"6d1c8721-e495-45da-8947-09c44940673d\") " pod="openshift-marketplace/marketplace-operator-79b997595-p5qrs"
Oct 03 13:35:43 crc kubenswrapper[4861]: I1003 13:35:43.201921 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/6d1c8721-e495-45da-8947-09c44940673d-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-p5qrs\" (UID: \"6d1c8721-e495-45da-8947-09c44940673d\") " pod="openshift-marketplace/marketplace-operator-79b997595-p5qrs"
Oct 03 13:35:43 crc kubenswrapper[4861]: I1003 13:35:43.203366 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/6d1c8721-e495-45da-8947-09c44940673d-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-p5qrs\" (UID: \"6d1c8721-e495-45da-8947-09c44940673d\") " pod="openshift-marketplace/marketplace-operator-79b997595-p5qrs"
Oct 03 13:35:43 crc kubenswrapper[4861]: I1003 13:35:43.207432 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/6d1c8721-e495-45da-8947-09c44940673d-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-p5qrs\" (UID: \"6d1c8721-e495-45da-8947-09c44940673d\") " pod="openshift-marketplace/marketplace-operator-79b997595-p5qrs"
Oct 03 13:35:43 crc kubenswrapper[4861]: I1003 13:35:43.218193 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fqtq9\" (UniqueName: \"kubernetes.io/projected/6d1c8721-e495-45da-8947-09c44940673d-kube-api-access-fqtq9\") pod \"marketplace-operator-79b997595-p5qrs\" (UID: \"6d1c8721-e495-45da-8947-09c44940673d\") " pod="openshift-marketplace/marketplace-operator-79b997595-p5qrs"
Oct 03 13:35:43 crc kubenswrapper[4861]: I1003 13:35:43.267818 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-p5qrs"
Oct 03 13:35:43 crc kubenswrapper[4861]: I1003 13:35:43.635412 4861 generic.go:334] "Generic (PLEG): container finished" podID="299f9a50-345e-4196-b954-1004eb2acd0c" containerID="2bddb3e1552c89e4767501ded459ac2fbe67db80f90574ff080288999cda277e" exitCode=0
Oct 03 13:35:43 crc kubenswrapper[4861]: I1003 13:35:43.635476 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nbf9j" event={"ID":"299f9a50-345e-4196-b954-1004eb2acd0c","Type":"ContainerDied","Data":"2bddb3e1552c89e4767501ded459ac2fbe67db80f90574ff080288999cda277e"}
Oct 03 13:35:43 crc kubenswrapper[4861]: I1003 13:35:43.637293 4861 generic.go:334] "Generic (PLEG): container finished" podID="f23bb6a0-acb7-4ada-96a2-73a978d75125" containerID="930ee4d553236fa442f10c4236d0fd2ee0ac0a7bb4c6c033a3b3c47d11a58589" exitCode=0
Oct 03 13:35:43 crc kubenswrapper[4861]: I1003 13:35:43.637352 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-bj6mh" event={"ID":"f23bb6a0-acb7-4ada-96a2-73a978d75125","Type":"ContainerDied","Data":"930ee4d553236fa442f10c4236d0fd2ee0ac0a7bb4c6c033a3b3c47d11a58589"}
Oct 03 13:35:43 crc kubenswrapper[4861]: I1003 13:35:43.639903 4861 generic.go:334] "Generic (PLEG): container finished" podID="b06b44ef-af9a-4253-8f45-44d98adc49bf" containerID="ff0d57e5709c1029702bd86c3a6778bba73296aafc3ab85d107c625a4fb915a3" exitCode=0
Oct 03 13:35:43 crc kubenswrapper[4861]: I1003 13:35:43.639934 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mbpzd" event={"ID":"b06b44ef-af9a-4253-8f45-44d98adc49bf","Type":"ContainerDied","Data":"ff0d57e5709c1029702bd86c3a6778bba73296aafc3ab85d107c625a4fb915a3"}
Oct 03 13:35:43 crc kubenswrapper[4861]: I1003 13:35:43.642800 4861 generic.go:334] "Generic (PLEG): container finished" podID="3656f402-8d9e-4401-83a5-8367adb5b0f0" containerID="aaf8baf14e6c25969767f16ab9ba3f73313955d9c7b4b0e01606d15fec21ce86" exitCode=0
Oct 03 13:35:43 crc kubenswrapper[4861]: I1003 13:35:43.642874 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-69rqg" event={"ID":"3656f402-8d9e-4401-83a5-8367adb5b0f0","Type":"ContainerDied","Data":"aaf8baf14e6c25969767f16ab9ba3f73313955d9c7b4b0e01606d15fec21ce86"}
Oct 03 13:35:43 crc kubenswrapper[4861]: I1003 13:35:43.645457 4861 generic.go:334] "Generic (PLEG): container finished" podID="c53f0f06-1b25-44b6-9797-1181e5c79ecf" containerID="a3085a4d9914bb07813a7a39c408230fb22c90f88c3aa40fe8b1b88b7442a9c8" exitCode=0
Oct 03 13:35:43 crc kubenswrapper[4861]: I1003 13:35:43.645498 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-whv8v" event={"ID":"c53f0f06-1b25-44b6-9797-1181e5c79ecf","Type":"ContainerDied","Data":"a3085a4d9914bb07813a7a39c408230fb22c90f88c3aa40fe8b1b88b7442a9c8"}
Oct 03 13:35:43 crc kubenswrapper[4861]: I1003 13:35:43.662763 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-p5qrs"]
Oct 03 13:35:43 crc kubenswrapper[4861]: I1003 13:35:43.885670 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mbpzd"
Oct 03 13:35:43 crc kubenswrapper[4861]: I1003 13:35:43.986030 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nbf9j"
Oct 03 13:35:44 crc kubenswrapper[4861]: I1003 13:35:44.012911 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b06b44ef-af9a-4253-8f45-44d98adc49bf-catalog-content\") pod \"b06b44ef-af9a-4253-8f45-44d98adc49bf\" (UID: \"b06b44ef-af9a-4253-8f45-44d98adc49bf\") "
Oct 03 13:35:44 crc kubenswrapper[4861]: I1003 13:35:44.013018 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b06b44ef-af9a-4253-8f45-44d98adc49bf-utilities\") pod \"b06b44ef-af9a-4253-8f45-44d98adc49bf\" (UID: \"b06b44ef-af9a-4253-8f45-44d98adc49bf\") "
Oct 03 13:35:44 crc kubenswrapper[4861]: I1003 13:35:44.013144 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9zbh4\" (UniqueName: \"kubernetes.io/projected/b06b44ef-af9a-4253-8f45-44d98adc49bf-kube-api-access-9zbh4\") pod \"b06b44ef-af9a-4253-8f45-44d98adc49bf\" (UID: \"b06b44ef-af9a-4253-8f45-44d98adc49bf\") "
Oct 03 13:35:44 crc kubenswrapper[4861]: I1003 13:35:44.014818 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b06b44ef-af9a-4253-8f45-44d98adc49bf-utilities" (OuterVolumeSpecName: "utilities") pod "b06b44ef-af9a-4253-8f45-44d98adc49bf" (UID: "b06b44ef-af9a-4253-8f45-44d98adc49bf"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 03 13:35:44 crc kubenswrapper[4861]: I1003 13:35:44.019623 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b06b44ef-af9a-4253-8f45-44d98adc49bf-kube-api-access-9zbh4" (OuterVolumeSpecName: "kube-api-access-9zbh4") pod "b06b44ef-af9a-4253-8f45-44d98adc49bf" (UID: "b06b44ef-af9a-4253-8f45-44d98adc49bf"). InnerVolumeSpecName "kube-api-access-9zbh4". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 13:35:44 crc kubenswrapper[4861]: I1003 13:35:44.080149 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b06b44ef-af9a-4253-8f45-44d98adc49bf-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b06b44ef-af9a-4253-8f45-44d98adc49bf" (UID: "b06b44ef-af9a-4253-8f45-44d98adc49bf"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 03 13:35:44 crc kubenswrapper[4861]: I1003 13:35:44.083585 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-69rqg"
Oct 03 13:35:44 crc kubenswrapper[4861]: I1003 13:35:44.092824 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-whv8v"
Oct 03 13:35:44 crc kubenswrapper[4861]: I1003 13:35:44.095046 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-bj6mh"
Oct 03 13:35:44 crc kubenswrapper[4861]: I1003 13:35:44.114146 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vw66w\" (UniqueName: \"kubernetes.io/projected/299f9a50-345e-4196-b954-1004eb2acd0c-kube-api-access-vw66w\") pod \"299f9a50-345e-4196-b954-1004eb2acd0c\" (UID: \"299f9a50-345e-4196-b954-1004eb2acd0c\") "
Oct 03 13:35:44 crc kubenswrapper[4861]: I1003 13:35:44.114213 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/299f9a50-345e-4196-b954-1004eb2acd0c-catalog-content\") pod \"299f9a50-345e-4196-b954-1004eb2acd0c\" (UID: \"299f9a50-345e-4196-b954-1004eb2acd0c\") "
Oct 03 13:35:44 crc kubenswrapper[4861]: I1003 13:35:44.114267 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/299f9a50-345e-4196-b954-1004eb2acd0c-utilities\") pod \"299f9a50-345e-4196-b954-1004eb2acd0c\" (UID: \"299f9a50-345e-4196-b954-1004eb2acd0c\") "
Oct 03 13:35:44 crc kubenswrapper[4861]: I1003 13:35:44.114504 4861 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b06b44ef-af9a-4253-8f45-44d98adc49bf-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 03 13:35:44 crc kubenswrapper[4861]: I1003 13:35:44.114516 4861 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b06b44ef-af9a-4253-8f45-44d98adc49bf-utilities\") on node \"crc\" DevicePath \"\""
Oct 03 13:35:44 crc kubenswrapper[4861]: I1003 13:35:44.114525 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9zbh4\" (UniqueName: \"kubernetes.io/projected/b06b44ef-af9a-4253-8f45-44d98adc49bf-kube-api-access-9zbh4\") on node \"crc\" DevicePath \"\""
Oct 03 13:35:44 crc kubenswrapper[4861]: I1003 13:35:44.115018 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/299f9a50-345e-4196-b954-1004eb2acd0c-utilities" (OuterVolumeSpecName: "utilities") pod "299f9a50-345e-4196-b954-1004eb2acd0c" (UID: "299f9a50-345e-4196-b954-1004eb2acd0c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 03 13:35:44 crc kubenswrapper[4861]: I1003 13:35:44.116464 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/299f9a50-345e-4196-b954-1004eb2acd0c-kube-api-access-vw66w" (OuterVolumeSpecName: "kube-api-access-vw66w") pod "299f9a50-345e-4196-b954-1004eb2acd0c" (UID: "299f9a50-345e-4196-b954-1004eb2acd0c"). InnerVolumeSpecName "kube-api-access-vw66w". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 13:35:44 crc kubenswrapper[4861]: I1003 13:35:44.147168 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/299f9a50-345e-4196-b954-1004eb2acd0c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "299f9a50-345e-4196-b954-1004eb2acd0c" (UID: "299f9a50-345e-4196-b954-1004eb2acd0c"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 03 13:35:44 crc kubenswrapper[4861]: I1003 13:35:44.215474 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9crkp\" (UniqueName: \"kubernetes.io/projected/c53f0f06-1b25-44b6-9797-1181e5c79ecf-kube-api-access-9crkp\") pod \"c53f0f06-1b25-44b6-9797-1181e5c79ecf\" (UID: \"c53f0f06-1b25-44b6-9797-1181e5c79ecf\") "
Oct 03 13:35:44 crc kubenswrapper[4861]: I1003 13:35:44.215525 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3656f402-8d9e-4401-83a5-8367adb5b0f0-utilities\") pod \"3656f402-8d9e-4401-83a5-8367adb5b0f0\" (UID: \"3656f402-8d9e-4401-83a5-8367adb5b0f0\") "
Oct 03 13:35:44 crc kubenswrapper[4861]: I1003 13:35:44.215558 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f23bb6a0-acb7-4ada-96a2-73a978d75125-marketplace-trusted-ca\") pod \"f23bb6a0-acb7-4ada-96a2-73a978d75125\" (UID: \"f23bb6a0-acb7-4ada-96a2-73a978d75125\") "
Oct 03 13:35:44 crc kubenswrapper[4861]: I1003 13:35:44.215604 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/f23bb6a0-acb7-4ada-96a2-73a978d75125-marketplace-operator-metrics\") pod \"f23bb6a0-acb7-4ada-96a2-73a978d75125\" (UID: \"f23bb6a0-acb7-4ada-96a2-73a978d75125\") "
Oct 03 13:35:44 crc kubenswrapper[4861]: I1003 13:35:44.215635 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3656f402-8d9e-4401-83a5-8367adb5b0f0-catalog-content\") pod \"3656f402-8d9e-4401-83a5-8367adb5b0f0\" (UID: \"3656f402-8d9e-4401-83a5-8367adb5b0f0\") "
Oct 03 13:35:44 crc kubenswrapper[4861]: I1003 13:35:44.215663 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c53f0f06-1b25-44b6-9797-1181e5c79ecf-catalog-content\") pod \"c53f0f06-1b25-44b6-9797-1181e5c79ecf\" (UID: \"c53f0f06-1b25-44b6-9797-1181e5c79ecf\") "
Oct 03 13:35:44 crc kubenswrapper[4861]: I1003 13:35:44.215689 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lg4jb\" (UniqueName: \"kubernetes.io/projected/3656f402-8d9e-4401-83a5-8367adb5b0f0-kube-api-access-lg4jb\") pod \"3656f402-8d9e-4401-83a5-8367adb5b0f0\" (UID: \"3656f402-8d9e-4401-83a5-8367adb5b0f0\") "
Oct 03 13:35:44 crc kubenswrapper[4861]: I1003 13:35:44.215729 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zjgz7\" (UniqueName: \"kubernetes.io/projected/f23bb6a0-acb7-4ada-96a2-73a978d75125-kube-api-access-zjgz7\") pod \"f23bb6a0-acb7-4ada-96a2-73a978d75125\" (UID: \"f23bb6a0-acb7-4ada-96a2-73a978d75125\") "
Oct 03 13:35:44 crc kubenswrapper[4861]: I1003 13:35:44.215752 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c53f0f06-1b25-44b6-9797-1181e5c79ecf-utilities\") pod \"c53f0f06-1b25-44b6-9797-1181e5c79ecf\" (UID: \"c53f0f06-1b25-44b6-9797-1181e5c79ecf\") "
Oct 03 13:35:44 crc kubenswrapper[4861]: I1003 13:35:44.215910 4861 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/299f9a50-345e-4196-b954-1004eb2acd0c-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 03 13:35:44 crc kubenswrapper[4861]: I1003 13:35:44.215921 4861 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/299f9a50-345e-4196-b954-1004eb2acd0c-utilities\") on node \"crc\" DevicePath \"\""
Oct 03 13:35:44 crc kubenswrapper[4861]: I1003 13:35:44.215930 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vw66w\" (UniqueName: \"kubernetes.io/projected/299f9a50-345e-4196-b954-1004eb2acd0c-kube-api-access-vw66w\") on node \"crc\" DevicePath \"\""
Oct 03 13:35:44 crc kubenswrapper[4861]: I1003 13:35:44.216535 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c53f0f06-1b25-44b6-9797-1181e5c79ecf-utilities" (OuterVolumeSpecName: "utilities") pod "c53f0f06-1b25-44b6-9797-1181e5c79ecf" (UID: "c53f0f06-1b25-44b6-9797-1181e5c79ecf"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 03 13:35:44 crc kubenswrapper[4861]: I1003 13:35:44.217285 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f23bb6a0-acb7-4ada-96a2-73a978d75125-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "f23bb6a0-acb7-4ada-96a2-73a978d75125" (UID: "f23bb6a0-acb7-4ada-96a2-73a978d75125"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 13:35:44 crc kubenswrapper[4861]: I1003 13:35:44.217858 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3656f402-8d9e-4401-83a5-8367adb5b0f0-utilities" (OuterVolumeSpecName: "utilities") pod "3656f402-8d9e-4401-83a5-8367adb5b0f0" (UID: "3656f402-8d9e-4401-83a5-8367adb5b0f0"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 03 13:35:44 crc kubenswrapper[4861]: I1003 13:35:44.218970 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c53f0f06-1b25-44b6-9797-1181e5c79ecf-kube-api-access-9crkp" (OuterVolumeSpecName: "kube-api-access-9crkp") pod "c53f0f06-1b25-44b6-9797-1181e5c79ecf" (UID: "c53f0f06-1b25-44b6-9797-1181e5c79ecf"). InnerVolumeSpecName "kube-api-access-9crkp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 13:35:44 crc kubenswrapper[4861]: I1003 13:35:44.219581 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3656f402-8d9e-4401-83a5-8367adb5b0f0-kube-api-access-lg4jb" (OuterVolumeSpecName: "kube-api-access-lg4jb") pod "3656f402-8d9e-4401-83a5-8367adb5b0f0" (UID: "3656f402-8d9e-4401-83a5-8367adb5b0f0"). InnerVolumeSpecName "kube-api-access-lg4jb". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 13:35:44 crc kubenswrapper[4861]: I1003 13:35:44.220179 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f23bb6a0-acb7-4ada-96a2-73a978d75125-kube-api-access-zjgz7" (OuterVolumeSpecName: "kube-api-access-zjgz7") pod "f23bb6a0-acb7-4ada-96a2-73a978d75125" (UID: "f23bb6a0-acb7-4ada-96a2-73a978d75125"). InnerVolumeSpecName "kube-api-access-zjgz7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 13:35:44 crc kubenswrapper[4861]: I1003 13:35:44.220549 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f23bb6a0-acb7-4ada-96a2-73a978d75125-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "f23bb6a0-acb7-4ada-96a2-73a978d75125" (UID: "f23bb6a0-acb7-4ada-96a2-73a978d75125"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 13:35:44 crc kubenswrapper[4861]: I1003 13:35:44.283959 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3656f402-8d9e-4401-83a5-8367adb5b0f0-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3656f402-8d9e-4401-83a5-8367adb5b0f0" (UID: "3656f402-8d9e-4401-83a5-8367adb5b0f0"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 03 13:35:44 crc kubenswrapper[4861]: I1003 13:35:44.319829 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zjgz7\" (UniqueName: \"kubernetes.io/projected/f23bb6a0-acb7-4ada-96a2-73a978d75125-kube-api-access-zjgz7\") on node \"crc\" DevicePath \"\""
Oct 03 13:35:44 crc kubenswrapper[4861]: I1003 13:35:44.319870 4861 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c53f0f06-1b25-44b6-9797-1181e5c79ecf-utilities\") on node \"crc\" DevicePath \"\""
Oct 03 13:35:44 crc kubenswrapper[4861]: I1003 13:35:44.319881 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9crkp\" (UniqueName: \"kubernetes.io/projected/c53f0f06-1b25-44b6-9797-1181e5c79ecf-kube-api-access-9crkp\") on node \"crc\" DevicePath \"\""
Oct 03 13:35:44 crc kubenswrapper[4861]: I1003 13:35:44.319890 4861 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3656f402-8d9e-4401-83a5-8367adb5b0f0-utilities\") on node \"crc\" DevicePath \"\""
Oct 03 13:35:44 crc kubenswrapper[4861]: I1003 13:35:44.319903 4861 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f23bb6a0-acb7-4ada-96a2-73a978d75125-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\""
Oct 03 13:35:44 crc kubenswrapper[4861]: I1003 13:35:44.319912 4861 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/f23bb6a0-acb7-4ada-96a2-73a978d75125-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\""
Oct 03 13:35:44 crc kubenswrapper[4861]: I1003 13:35:44.319920 4861 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3656f402-8d9e-4401-83a5-8367adb5b0f0-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 03 13:35:44 crc kubenswrapper[4861]: I1003 13:35:44.319931 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lg4jb\" (UniqueName: \"kubernetes.io/projected/3656f402-8d9e-4401-83a5-8367adb5b0f0-kube-api-access-lg4jb\") on node \"crc\" DevicePath \"\""
Oct 03 13:35:44 crc kubenswrapper[4861]: I1003 13:35:44.373967 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c53f0f06-1b25-44b6-9797-1181e5c79ecf-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c53f0f06-1b25-44b6-9797-1181e5c79ecf" (UID: "c53f0f06-1b25-44b6-9797-1181e5c79ecf"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 03 13:35:44 crc kubenswrapper[4861]: I1003 13:35:44.421136 4861 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c53f0f06-1b25-44b6-9797-1181e5c79ecf-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 03 13:35:44 crc kubenswrapper[4861]: I1003 13:35:44.651896 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-bj6mh" event={"ID":"f23bb6a0-acb7-4ada-96a2-73a978d75125","Type":"ContainerDied","Data":"2544c81ddfd2d0d2c87e5532b8d5a5c2e3f4dd56967cbbc27ed75b5dde740b7d"}
Oct 03 13:35:44 crc kubenswrapper[4861]: I1003 13:35:44.652786 4861 scope.go:117] "RemoveContainer" containerID="930ee4d553236fa442f10c4236d0fd2ee0ac0a7bb4c6c033a3b3c47d11a58589"
Oct 03 13:35:44 crc kubenswrapper[4861]: I1003 13:35:44.651915 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-bj6mh"
Oct 03 13:35:44 crc kubenswrapper[4861]: I1003 13:35:44.656171 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mbpzd" event={"ID":"b06b44ef-af9a-4253-8f45-44d98adc49bf","Type":"ContainerDied","Data":"fc1ae8e36f66dd86e8a17966eddc8838ebf87900d7be613a4cd7575151b8ccb8"}
Oct 03 13:35:44 crc kubenswrapper[4861]: I1003 13:35:44.656275 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mbpzd"
Oct 03 13:35:44 crc kubenswrapper[4861]: I1003 13:35:44.658595 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-69rqg" event={"ID":"3656f402-8d9e-4401-83a5-8367adb5b0f0","Type":"ContainerDied","Data":"b6065b0742374c202b8508ffbe7457f186222cd96b0c138f89c00d652cc74c28"}
Oct 03 13:35:44 crc kubenswrapper[4861]: I1003 13:35:44.658660 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-69rqg"
Oct 03 13:35:44 crc kubenswrapper[4861]: I1003 13:35:44.670882 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-whv8v" event={"ID":"c53f0f06-1b25-44b6-9797-1181e5c79ecf","Type":"ContainerDied","Data":"c2eb7c354fab8a7e55dda4c6bc2f314cf63872406fe67dab48866e6ef4cf8067"}
Oct 03 13:35:44 crc kubenswrapper[4861]: I1003 13:35:44.670974 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-whv8v"
Oct 03 13:35:44 crc kubenswrapper[4861]: I1003 13:35:44.674438 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-p5qrs" event={"ID":"6d1c8721-e495-45da-8947-09c44940673d","Type":"ContainerStarted","Data":"f907d1897bb3e89f149e40a822f9feace32b2ebb9d1f5785dd6980799a3a45c8"}
Oct 03 13:35:44 crc kubenswrapper[4861]: I1003 13:35:44.674481 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-p5qrs" event={"ID":"6d1c8721-e495-45da-8947-09c44940673d","Type":"ContainerStarted","Data":"d6728fd5f772b6854687902aed803d5fd525dd1e0f473835b5663976a7c67144"}
Oct 03 13:35:44 crc kubenswrapper[4861]: I1003 13:35:44.675465 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-p5qrs"
Oct 03 13:35:44 crc kubenswrapper[4861]: I1003 13:35:44.682263 4861 scope.go:117] "RemoveContainer" containerID="ff0d57e5709c1029702bd86c3a6778bba73296aafc3ab85d107c625a4fb915a3"
Oct 03 13:35:44 crc kubenswrapper[4861]: I1003 13:35:44.682941 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-p5qrs"
Oct 03 13:35:44 crc kubenswrapper[4861]: I1003 13:35:44.683741 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nbf9j"
Oct 03 13:35:44 crc kubenswrapper[4861]: I1003 13:35:44.722505 4861 scope.go:117] "RemoveContainer" containerID="92d0ab920c3238b6f3c0d979aeb24476379146d839e20e3959570303a09f8f14"
Oct 03 13:35:44 crc kubenswrapper[4861]: I1003 13:35:44.743889 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-bj6mh"]
Oct 03 13:35:44 crc kubenswrapper[4861]: I1003 13:35:44.743925 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-bj6mh"]
Oct 03 13:35:44 crc kubenswrapper[4861]: I1003 13:35:44.743939 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nbf9j" event={"ID":"299f9a50-345e-4196-b954-1004eb2acd0c","Type":"ContainerDied","Data":"8f14c91771cf75a641155d6934105486b079123222c5814297b538cd03510221"}
Oct 03 13:35:44 crc kubenswrapper[4861]: I1003 13:35:44.744926 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-p5qrs" podStartSLOduration=2.744902834 podStartE2EDuration="2.744902834s" podCreationTimestamp="2025-10-03 13:35:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:35:44.741977478 +0000 UTC m=+258.739962535" watchObservedRunningTime="2025-10-03 13:35:44.744902834 +0000 UTC m=+258.742887891"
Oct 03 13:35:44 crc kubenswrapper[4861]: I1003 13:35:44.770198 4861 scope.go:117] "RemoveContainer" containerID="a7deffce8358fd5fc414c099c19a02132dccfce0ea4ea378aa40163ae116ab32"
Oct 03 13:35:44 crc kubenswrapper[4861]: I1003 13:35:44.777460 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-69rqg"]
Oct 03 13:35:44 crc kubenswrapper[4861]: I1003 13:35:44.793312 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-69rqg"]
Oct 03 13:35:44 crc kubenswrapper[4861]: I1003 13:35:44.796333 4861 scope.go:117] "RemoveContainer" containerID="aaf8baf14e6c25969767f16ab9ba3f73313955d9c7b4b0e01606d15fec21ce86"
Oct 03 13:35:44 crc kubenswrapper[4861]: I1003 13:35:44.807387 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-whv8v"]
Oct 03 13:35:44 crc kubenswrapper[4861]: I1003 13:35:44.815371 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-whv8v"]
Oct 03 13:35:44 crc kubenswrapper[4861]: I1003 13:35:44.824824 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-mbpzd"]
Oct 03 13:35:44 crc kubenswrapper[4861]: I1003 13:35:44.827923 4861 scope.go:117] "RemoveContainer" containerID="67e8d6f986d77d32d57db15a87968fd071cb3978158c3bd2940dc1283441ede9"
Oct 03 13:35:44 crc kubenswrapper[4861]: I1003 13:35:44.830157 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-mbpzd"]
Oct 03 13:35:44 crc kubenswrapper[4861]: I1003 13:35:44.833364 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-nbf9j"]
Oct 03 13:35:44 crc kubenswrapper[4861]: I1003 13:35:44.835276 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-nbf9j"]
Oct 03 13:35:44 crc kubenswrapper[4861]: I1003 13:35:44.848720 4861 scope.go:117] "RemoveContainer" containerID="9ff5726b5a3ac2f89e4292af71ddcd6f1eac8b092348b249458e3a4a61ccafda"
Oct 03 13:35:44 crc kubenswrapper[4861]: I1003 13:35:44.866901 4861 scope.go:117] "RemoveContainer" containerID="a3085a4d9914bb07813a7a39c408230fb22c90f88c3aa40fe8b1b88b7442a9c8"
Oct 03 13:35:44 crc kubenswrapper[4861]: I1003 13:35:44.882722 4861 scope.go:117] "RemoveContainer" containerID="b2c39e4fb670f73c457ab92ff1dcf186aca025c680fb997662c1d651dfbda318"
Oct 03 13:35:44 crc kubenswrapper[4861]: I1003 13:35:44.894929 4861 scope.go:117] "RemoveContainer" containerID="7b96793850f176e5c52a45c1c4a69f8dc4f618f694df3255cdd14853d331e5b3"
Oct 03 13:35:44 crc kubenswrapper[4861]: I1003 13:35:44.910313 4861 scope.go:117] "RemoveContainer" containerID="2bddb3e1552c89e4767501ded459ac2fbe67db80f90574ff080288999cda277e"
Oct 03 13:35:44 crc kubenswrapper[4861]: I1003 13:35:44.923564 4861 scope.go:117] "RemoveContainer" containerID="8221348b340304ec9be773075efd4c9c0b3959e9caa3e3d8cdfcf2c1bd81627f"
Oct 03 13:35:44 crc kubenswrapper[4861]: I1003 13:35:44.940741 4861 scope.go:117] "RemoveContainer" containerID="8accb3533b51ad509147e372ac6b5b5370dd1ef3f1b1e81232269dac3f31be02"
Oct 03 13:35:45 crc kubenswrapper[4861]: I1003 13:35:45.461462 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-vl5pp"]
Oct 03 13:35:45 crc kubenswrapper[4861]: E1003 13:35:45.461682 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c53f0f06-1b25-44b6-9797-1181e5c79ecf" containerName="registry-server"
Oct 03 13:35:45 crc kubenswrapper[4861]: I1003 13:35:45.461807 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="c53f0f06-1b25-44b6-9797-1181e5c79ecf" containerName="registry-server"
Oct 03 13:35:45 crc kubenswrapper[4861]: E1003 13:35:45.461832 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="299f9a50-345e-4196-b954-1004eb2acd0c" containerName="registry-server"
Oct 03 13:35:45 crc kubenswrapper[4861]: I1003 13:35:45.461839 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="299f9a50-345e-4196-b954-1004eb2acd0c" containerName="registry-server"
Oct 03 13:35:45 crc kubenswrapper[4861]: E1003 13:35:45.461850 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3656f402-8d9e-4401-83a5-8367adb5b0f0" containerName="extract-utilities"
Oct 03 13:35:45 crc kubenswrapper[4861]: I1003 13:35:45.461856 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="3656f402-8d9e-4401-83a5-8367adb5b0f0" containerName="extract-utilities"
Oct 03 13:35:45 crc kubenswrapper[4861]: E1003 13:35:45.461863 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b06b44ef-af9a-4253-8f45-44d98adc49bf" containerName="extract-utilities"
Oct 03 13:35:45 crc kubenswrapper[4861]: I1003 13:35:45.461869 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="b06b44ef-af9a-4253-8f45-44d98adc49bf" containerName="extract-utilities"
Oct 03 13:35:45 crc kubenswrapper[4861]: E1003 13:35:45.461877 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b06b44ef-af9a-4253-8f45-44d98adc49bf" containerName="extract-content"
Oct 03 13:35:45 crc kubenswrapper[4861]: I1003 13:35:45.461883 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="b06b44ef-af9a-4253-8f45-44d98adc49bf" containerName="extract-content"
Oct 03 13:35:45 crc kubenswrapper[4861]: E1003 13:35:45.461893 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c53f0f06-1b25-44b6-9797-1181e5c79ecf" containerName="extract-utilities"
Oct 03 13:35:45 crc kubenswrapper[4861]: I1003 13:35:45.461898 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="c53f0f06-1b25-44b6-9797-1181e5c79ecf" containerName="extract-utilities"
Oct 03 13:35:45 crc kubenswrapper[4861]: E1003 13:35:45.461907 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c53f0f06-1b25-44b6-9797-1181e5c79ecf" containerName="extract-content"
Oct 03 13:35:45 crc kubenswrapper[4861]: I1003 13:35:45.461912 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="c53f0f06-1b25-44b6-9797-1181e5c79ecf" containerName="extract-content"
Oct 03 13:35:45 crc kubenswrapper[4861]: E1003 13:35:45.461919 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b06b44ef-af9a-4253-8f45-44d98adc49bf" containerName="registry-server"
Oct 03 13:35:45 crc kubenswrapper[4861]: I1003 13:35:45.461924 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="b06b44ef-af9a-4253-8f45-44d98adc49bf" containerName="registry-server"
Oct 03 13:35:45 crc kubenswrapper[4861]: E1003 13:35:45.461934 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="299f9a50-345e-4196-b954-1004eb2acd0c" containerName="extract-utilities"
Oct 03 13:35:45 crc kubenswrapper[4861]: I1003 13:35:45.461939 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="299f9a50-345e-4196-b954-1004eb2acd0c" containerName="extract-utilities"
Oct 03 13:35:45 crc kubenswrapper[4861]: E1003 13:35:45.461945 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3656f402-8d9e-4401-83a5-8367adb5b0f0" containerName="registry-server"
Oct 03 13:35:45 crc kubenswrapper[4861]: I1003 13:35:45.461951 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="3656f402-8d9e-4401-83a5-8367adb5b0f0" containerName="registry-server"
Oct 03 13:35:45 crc kubenswrapper[4861]: E1003 13:35:45.461958 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="299f9a50-345e-4196-b954-1004eb2acd0c" containerName="extract-content"
Oct 03 13:35:45 crc kubenswrapper[4861]: I1003 13:35:45.461964 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="299f9a50-345e-4196-b954-1004eb2acd0c" containerName="extract-content"
Oct 03 13:35:45 crc kubenswrapper[4861]: E1003 13:35:45.461973 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f23bb6a0-acb7-4ada-96a2-73a978d75125" containerName="marketplace-operator"
Oct 03 13:35:45 crc kubenswrapper[4861]: I1003 13:35:45.461978 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="f23bb6a0-acb7-4ada-96a2-73a978d75125" containerName="marketplace-operator"
Oct 03 13:35:45 crc kubenswrapper[4861]: E1003 13:35:45.461987 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3656f402-8d9e-4401-83a5-8367adb5b0f0" containerName="extract-content"
Oct 03 13:35:45 crc kubenswrapper[4861]: I1003 13:35:45.461992 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="3656f402-8d9e-4401-83a5-8367adb5b0f0" containerName="extract-content"
Oct 03 13:35:45 crc kubenswrapper[4861]: I1003 13:35:45.462089 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="b06b44ef-af9a-4253-8f45-44d98adc49bf" containerName="registry-server"
Oct 03 13:35:45 crc kubenswrapper[4861]: I1003 13:35:45.462100 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="299f9a50-345e-4196-b954-1004eb2acd0c" containerName="registry-server"
Oct 03 13:35:45 crc kubenswrapper[4861]: I1003 13:35:45.462109 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="c53f0f06-1b25-44b6-9797-1181e5c79ecf" containerName="registry-server"
Oct 03 13:35:45 crc kubenswrapper[4861]: I1003 13:35:45.462116 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="f23bb6a0-acb7-4ada-96a2-73a978d75125" containerName="marketplace-operator"
Oct 03 13:35:45 crc kubenswrapper[4861]: I1003 13:35:45.462125 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="3656f402-8d9e-4401-83a5-8367adb5b0f0" containerName="registry-server"
Oct 03 13:35:45 crc kubenswrapper[4861]: I1003 13:35:45.462787 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-vl5pp"
Oct 03 13:35:45 crc kubenswrapper[4861]: I1003 13:35:45.465804 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh"
Oct 03 13:35:45 crc kubenswrapper[4861]: I1003 13:35:45.477208 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-vl5pp"]
Oct 03 13:35:45 crc kubenswrapper[4861]: I1003 13:35:45.537377 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kdbs9\" (UniqueName: \"kubernetes.io/projected/d2c62b5f-5f8c-4245-90a3-fb06846e063b-kube-api-access-kdbs9\") pod \"redhat-operators-vl5pp\" (UID: \"d2c62b5f-5f8c-4245-90a3-fb06846e063b\") " pod="openshift-marketplace/redhat-operators-vl5pp"
Oct 03 13:35:45 crc kubenswrapper[4861]: I1003 13:35:45.537473 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d2c62b5f-5f8c-4245-90a3-fb06846e063b-utilities\") pod \"redhat-operators-vl5pp\" (UID: \"d2c62b5f-5f8c-4245-90a3-fb06846e063b\") " pod="openshift-marketplace/redhat-operators-vl5pp"
Oct 03 13:35:45 crc kubenswrapper[4861]: I1003 13:35:45.537537 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d2c62b5f-5f8c-4245-90a3-fb06846e063b-catalog-content\") pod \"redhat-operators-vl5pp\" (UID: \"d2c62b5f-5f8c-4245-90a3-fb06846e063b\") " pod="openshift-marketplace/redhat-operators-vl5pp"
Oct 03 13:35:45 crc kubenswrapper[4861]: I1003 13:35:45.638726 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d2c62b5f-5f8c-4245-90a3-fb06846e063b-catalog-content\") pod \"redhat-operators-vl5pp\" (UID: \"d2c62b5f-5f8c-4245-90a3-fb06846e063b\") " pod="openshift-marketplace/redhat-operators-vl5pp"
Oct 03 13:35:45 crc kubenswrapper[4861]: I1003 13:35:45.638774 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kdbs9\" (UniqueName: \"kubernetes.io/projected/d2c62b5f-5f8c-4245-90a3-fb06846e063b-kube-api-access-kdbs9\") pod \"redhat-operators-vl5pp\" (UID: \"d2c62b5f-5f8c-4245-90a3-fb06846e063b\") " pod="openshift-marketplace/redhat-operators-vl5pp"
Oct 03 13:35:45 crc kubenswrapper[4861]: I1003 13:35:45.638843 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d2c62b5f-5f8c-4245-90a3-fb06846e063b-utilities\") pod \"redhat-operators-vl5pp\" (UID: \"d2c62b5f-5f8c-4245-90a3-fb06846e063b\") " pod="openshift-marketplace/redhat-operators-vl5pp"
Oct 03 13:35:45 crc kubenswrapper[4861]: I1003 13:35:45.639137 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d2c62b5f-5f8c-4245-90a3-fb06846e063b-catalog-content\") pod \"redhat-operators-vl5pp\" (UID: \"d2c62b5f-5f8c-4245-90a3-fb06846e063b\") " pod="openshift-marketplace/redhat-operators-vl5pp"
Oct 03 13:35:45 crc kubenswrapper[4861]: I1003 13:35:45.639175 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d2c62b5f-5f8c-4245-90a3-fb06846e063b-utilities\") pod \"redhat-operators-vl5pp\" (UID: \"d2c62b5f-5f8c-4245-90a3-fb06846e063b\") " pod="openshift-marketplace/redhat-operators-vl5pp"
Oct 03 13:35:45 crc kubenswrapper[4861]: I1003 13:35:45.657918 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kdbs9\" (UniqueName: \"kubernetes.io/projected/d2c62b5f-5f8c-4245-90a3-fb06846e063b-kube-api-access-kdbs9\") pod \"redhat-operators-vl5pp\" (UID: \"d2c62b5f-5f8c-4245-90a3-fb06846e063b\") " pod="openshift-marketplace/redhat-operators-vl5pp"
Oct 03 13:35:45 crc kubenswrapper[4861]: I1003 13:35:45.816336 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-vl5pp"
Oct 03 13:35:45 crc kubenswrapper[4861]: I1003 13:35:45.998826 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-vl5pp"]
Oct 03 13:35:46 crc kubenswrapper[4861]: I1003 13:35:46.465586 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-c5wgm"]
Oct 03 13:35:46 crc kubenswrapper[4861]: I1003 13:35:46.467546 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-c5wgm"
Oct 03 13:35:46 crc kubenswrapper[4861]: I1003 13:35:46.472776 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g"
Oct 03 13:35:46 crc kubenswrapper[4861]: I1003 13:35:46.479492 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-c5wgm"]
Oct 03 13:35:46 crc kubenswrapper[4861]: I1003 13:35:46.548146 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fe2ed840-3dd5-4276-b039-eec14967f4ee-utilities\") pod \"certified-operators-c5wgm\" (UID: \"fe2ed840-3dd5-4276-b039-eec14967f4ee\") " pod="openshift-marketplace/certified-operators-c5wgm"
Oct 03 13:35:46 crc kubenswrapper[4861]: I1003 13:35:46.548455 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fe2ed840-3dd5-4276-b039-eec14967f4ee-catalog-content\") pod \"certified-operators-c5wgm\" (UID: \"fe2ed840-3dd5-4276-b039-eec14967f4ee\") " pod="openshift-marketplace/certified-operators-c5wgm"
Oct 03 13:35:46 crc kubenswrapper[4861]: I1003 13:35:46.548573 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r6mtr\" (UniqueName: \"kubernetes.io/projected/fe2ed840-3dd5-4276-b039-eec14967f4ee-kube-api-access-r6mtr\") pod \"certified-operators-c5wgm\" (UID: \"fe2ed840-3dd5-4276-b039-eec14967f4ee\") " pod="openshift-marketplace/certified-operators-c5wgm"
Oct 03 13:35:46 crc kubenswrapper[4861]: I1003 13:35:46.650476 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fe2ed840-3dd5-4276-b039-eec14967f4ee-catalog-content\") pod \"certified-operators-c5wgm\" (UID: \"fe2ed840-3dd5-4276-b039-eec14967f4ee\") " pod="openshift-marketplace/certified-operators-c5wgm"
Oct 03 13:35:46 crc kubenswrapper[4861]: I1003 13:35:46.651075 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fe2ed840-3dd5-4276-b039-eec14967f4ee-catalog-content\") pod \"certified-operators-c5wgm\" (UID: \"fe2ed840-3dd5-4276-b039-eec14967f4ee\") " pod="openshift-marketplace/certified-operators-c5wgm"
Oct 03 13:35:46 crc kubenswrapper[4861]: I1003 13:35:46.650948 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r6mtr\" (UniqueName: \"kubernetes.io/projected/fe2ed840-3dd5-4276-b039-eec14967f4ee-kube-api-access-r6mtr\") pod \"certified-operators-c5wgm\" (UID: \"fe2ed840-3dd5-4276-b039-eec14967f4ee\") " pod="openshift-marketplace/certified-operators-c5wgm"
Oct 03 13:35:46 crc kubenswrapper[4861]: I1003 13:35:46.652176 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fe2ed840-3dd5-4276-b039-eec14967f4ee-utilities\") pod \"certified-operators-c5wgm\" (UID: \"fe2ed840-3dd5-4276-b039-eec14967f4ee\") " pod="openshift-marketplace/certified-operators-c5wgm"
Oct 03 13:35:46 crc kubenswrapper[4861]: I1003 13:35:46.652735 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fe2ed840-3dd5-4276-b039-eec14967f4ee-utilities\") pod \"certified-operators-c5wgm\" (UID: \"fe2ed840-3dd5-4276-b039-eec14967f4ee\") " pod="openshift-marketplace/certified-operators-c5wgm"
Oct 03 13:35:46 crc kubenswrapper[4861]: I1003 13:35:46.671914 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r6mtr\" (UniqueName: \"kubernetes.io/projected/fe2ed840-3dd5-4276-b039-eec14967f4ee-kube-api-access-r6mtr\") pod \"certified-operators-c5wgm\" (UID: \"fe2ed840-3dd5-4276-b039-eec14967f4ee\") " pod="openshift-marketplace/certified-operators-c5wgm"
Oct 03 13:35:46 crc kubenswrapper[4861]: I1003 13:35:46.689425 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="299f9a50-345e-4196-b954-1004eb2acd0c" path="/var/lib/kubelet/pods/299f9a50-345e-4196-b954-1004eb2acd0c/volumes"
Oct 03 13:35:46 crc kubenswrapper[4861]: I1003 13:35:46.692256 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3656f402-8d9e-4401-83a5-8367adb5b0f0" path="/var/lib/kubelet/pods/3656f402-8d9e-4401-83a5-8367adb5b0f0/volumes"
Oct 03 13:35:46 crc kubenswrapper[4861]: I1003 13:35:46.692796 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b06b44ef-af9a-4253-8f45-44d98adc49bf" path="/var/lib/kubelet/pods/b06b44ef-af9a-4253-8f45-44d98adc49bf/volumes"
Oct 03 13:35:46 crc kubenswrapper[4861]: I1003 13:35:46.694072 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c53f0f06-1b25-44b6-9797-1181e5c79ecf" path="/var/lib/kubelet/pods/c53f0f06-1b25-44b6-9797-1181e5c79ecf/volumes"
Oct 03 13:35:46 crc kubenswrapper[4861]: I1003 13:35:46.716877 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f23bb6a0-acb7-4ada-96a2-73a978d75125" path="/var/lib/kubelet/pods/f23bb6a0-acb7-4ada-96a2-73a978d75125/volumes"
Oct 03 13:35:46 crc kubenswrapper[4861]: I1003 13:35:46.724649 4861 generic.go:334] "Generic (PLEG): container finished" podID="d2c62b5f-5f8c-4245-90a3-fb06846e063b" containerID="3418a4abb1517c799c2036bd3136be15b40e06c9ee913feb57c1c010ec7f5af0" exitCode=0
Oct 03 13:35:46 crc kubenswrapper[4861]: I1003 13:35:46.725529 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vl5pp" event={"ID":"d2c62b5f-5f8c-4245-90a3-fb06846e063b","Type":"ContainerDied","Data":"3418a4abb1517c799c2036bd3136be15b40e06c9ee913feb57c1c010ec7f5af0"}
Oct 03 13:35:46 crc kubenswrapper[4861]: I1003 13:35:46.725555 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vl5pp" event={"ID":"d2c62b5f-5f8c-4245-90a3-fb06846e063b","Type":"ContainerStarted","Data":"17991f460abd1e83bd23e0a9fd04dbcfcb7757baa59c1a418ad01ef318fdbde8"}
Oct 03 13:35:46 crc kubenswrapper[4861]: I1003 13:35:46.787192 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-c5wgm"
Oct 03 13:35:46 crc kubenswrapper[4861]: I1003 13:35:46.989953 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-c5wgm"]
Oct 03 13:35:47 crc kubenswrapper[4861]: I1003 13:35:47.731484 4861 generic.go:334] "Generic (PLEG): container finished" podID="fe2ed840-3dd5-4276-b039-eec14967f4ee" containerID="a1c60e1adcaa01fc88ae8142d110d790ec48c7f043710b37a3618b204c6c9200" exitCode=0
Oct 03 13:35:47 crc kubenswrapper[4861]: I1003 13:35:47.731537 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c5wgm" event={"ID":"fe2ed840-3dd5-4276-b039-eec14967f4ee","Type":"ContainerDied","Data":"a1c60e1adcaa01fc88ae8142d110d790ec48c7f043710b37a3618b204c6c9200"}
Oct 03 13:35:47 crc kubenswrapper[4861]: I1003 13:35:47.731567 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c5wgm" event={"ID":"fe2ed840-3dd5-4276-b039-eec14967f4ee","Type":"ContainerStarted","Data":"e1ad832b12f4ecb674594929fb505b205acb0d63cdf96123485d3bf66b0c7778"}
Oct 03 13:35:47 crc kubenswrapper[4861]: I1003 13:35:47.856677 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-9nrfv"]
Oct 03 13:35:47 crc kubenswrapper[4861]: I1003 13:35:47.857957 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-9nrfv"
Oct 03 13:35:47 crc kubenswrapper[4861]: I1003 13:35:47.860574 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb"
Oct 03 13:35:47 crc kubenswrapper[4861]: I1003 13:35:47.871783 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-9nrfv"]
Oct 03 13:35:47 crc kubenswrapper[4861]: I1003 13:35:47.975390 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cded624e-5d0b-45da-9e1f-de6def114c00-utilities\") pod \"redhat-marketplace-9nrfv\" (UID: \"cded624e-5d0b-45da-9e1f-de6def114c00\") " pod="openshift-marketplace/redhat-marketplace-9nrfv"
Oct 03 13:35:47 crc kubenswrapper[4861]: I1003 13:35:47.975452 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gd8mv\" (UniqueName: \"kubernetes.io/projected/cded624e-5d0b-45da-9e1f-de6def114c00-kube-api-access-gd8mv\") pod \"redhat-marketplace-9nrfv\" (UID: \"cded624e-5d0b-45da-9e1f-de6def114c00\") " pod="openshift-marketplace/redhat-marketplace-9nrfv"
Oct 03 13:35:47 crc kubenswrapper[4861]: I1003 13:35:47.975498 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cded624e-5d0b-45da-9e1f-de6def114c00-catalog-content\") pod \"redhat-marketplace-9nrfv\" (UID: \"cded624e-5d0b-45da-9e1f-de6def114c00\") " pod="openshift-marketplace/redhat-marketplace-9nrfv"
Oct 03 13:35:48 crc kubenswrapper[4861]: I1003 13:35:48.076721 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cded624e-5d0b-45da-9e1f-de6def114c00-utilities\") pod \"redhat-marketplace-9nrfv\" (UID: \"cded624e-5d0b-45da-9e1f-de6def114c00\") " pod="openshift-marketplace/redhat-marketplace-9nrfv"
Oct 03 13:35:48 crc kubenswrapper[4861]: I1003 13:35:48.076761 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gd8mv\" (UniqueName: \"kubernetes.io/projected/cded624e-5d0b-45da-9e1f-de6def114c00-kube-api-access-gd8mv\") pod \"redhat-marketplace-9nrfv\" (UID: \"cded624e-5d0b-45da-9e1f-de6def114c00\") " pod="openshift-marketplace/redhat-marketplace-9nrfv"
Oct 03 13:35:48 crc kubenswrapper[4861]: I1003 13:35:48.076789 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cded624e-5d0b-45da-9e1f-de6def114c00-catalog-content\") pod \"redhat-marketplace-9nrfv\" (UID: \"cded624e-5d0b-45da-9e1f-de6def114c00\") " pod="openshift-marketplace/redhat-marketplace-9nrfv"
Oct 03 13:35:48 crc kubenswrapper[4861]: I1003 13:35:48.077183 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cded624e-5d0b-45da-9e1f-de6def114c00-catalog-content\") pod \"redhat-marketplace-9nrfv\" (UID: \"cded624e-5d0b-45da-9e1f-de6def114c00\") " pod="openshift-marketplace/redhat-marketplace-9nrfv"
Oct 03 13:35:48 crc kubenswrapper[4861]: I1003 13:35:48.077206 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cded624e-5d0b-45da-9e1f-de6def114c00-utilities\") pod \"redhat-marketplace-9nrfv\" (UID: \"cded624e-5d0b-45da-9e1f-de6def114c00\") " pod="openshift-marketplace/redhat-marketplace-9nrfv"
Oct 03 13:35:48 crc kubenswrapper[4861]: I1003 13:35:48.107771 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gd8mv\" (UniqueName: \"kubernetes.io/projected/cded624e-5d0b-45da-9e1f-de6def114c00-kube-api-access-gd8mv\") pod \"redhat-marketplace-9nrfv\" (UID: \"cded624e-5d0b-45da-9e1f-de6def114c00\") " pod="openshift-marketplace/redhat-marketplace-9nrfv"
Oct 03 13:35:48 crc kubenswrapper[4861]: I1003 13:35:48.181350 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-9nrfv"
Oct 03 13:35:48 crc kubenswrapper[4861]: I1003 13:35:48.379943 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-9nrfv"]
Oct 03 13:35:48 crc kubenswrapper[4861]: W1003 13:35:48.386948 4861 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcded624e_5d0b_45da_9e1f_de6def114c00.slice/crio-58a610a954089b05c09c33bfc87e6cab68b78749782d07bdaa51c30e1f684b57 WatchSource:0}: Error finding container 58a610a954089b05c09c33bfc87e6cab68b78749782d07bdaa51c30e1f684b57: Status 404 returned error can't find the container with id 58a610a954089b05c09c33bfc87e6cab68b78749782d07bdaa51c30e1f684b57
Oct 03 13:35:48 crc kubenswrapper[4861]: I1003 13:35:48.738300 4861 generic.go:334] "Generic (PLEG): container finished" podID="cded624e-5d0b-45da-9e1f-de6def114c00" containerID="d0558d4760544671c08d67f84d1f50bc60d133d5199c160c9b2af7690ae95c53" exitCode=0
Oct 03 13:35:48 crc kubenswrapper[4861]: I1003 13:35:48.738447 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9nrfv" event={"ID":"cded624e-5d0b-45da-9e1f-de6def114c00","Type":"ContainerDied","Data":"d0558d4760544671c08d67f84d1f50bc60d133d5199c160c9b2af7690ae95c53"}
Oct 03 13:35:48 crc kubenswrapper[4861]: I1003 13:35:48.738711 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9nrfv" event={"ID":"cded624e-5d0b-45da-9e1f-de6def114c00","Type":"ContainerStarted","Data":"58a610a954089b05c09c33bfc87e6cab68b78749782d07bdaa51c30e1f684b57"}
Oct 03 13:35:48 crc kubenswrapper[4861]: I1003 13:35:48.745780 4861 generic.go:334] "Generic (PLEG): container finished" podID="d2c62b5f-5f8c-4245-90a3-fb06846e063b" containerID="ee29a826d52110cd9088e9f7364a5be5d5be1f4725bf93db2a6677c2afe2d54c" exitCode=0
Oct 03 13:35:48 crc kubenswrapper[4861]: I1003 13:35:48.745846 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vl5pp" event={"ID":"d2c62b5f-5f8c-4245-90a3-fb06846e063b","Type":"ContainerDied","Data":"ee29a826d52110cd9088e9f7364a5be5d5be1f4725bf93db2a6677c2afe2d54c"}
Oct 03 13:35:48 crc kubenswrapper[4861]: I1003 13:35:48.864280 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-6q6wr"]
Oct 03 13:35:48 crc kubenswrapper[4861]: I1003 13:35:48.865175 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-6q6wr" Oct 03 13:35:48 crc kubenswrapper[4861]: I1003 13:35:48.866674 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Oct 03 13:35:48 crc kubenswrapper[4861]: I1003 13:35:48.873257 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-6q6wr"] Oct 03 13:35:48 crc kubenswrapper[4861]: I1003 13:35:48.987971 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kqkn8\" (UniqueName: \"kubernetes.io/projected/e67e431c-da77-437f-a3e7-1e6abb3ccc8b-kube-api-access-kqkn8\") pod \"community-operators-6q6wr\" (UID: \"e67e431c-da77-437f-a3e7-1e6abb3ccc8b\") " pod="openshift-marketplace/community-operators-6q6wr" Oct 03 13:35:48 crc kubenswrapper[4861]: I1003 13:35:48.988027 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e67e431c-da77-437f-a3e7-1e6abb3ccc8b-catalog-content\") pod \"community-operators-6q6wr\" (UID: \"e67e431c-da77-437f-a3e7-1e6abb3ccc8b\") " pod="openshift-marketplace/community-operators-6q6wr" Oct 03 13:35:48 crc kubenswrapper[4861]: I1003 13:35:48.988152 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e67e431c-da77-437f-a3e7-1e6abb3ccc8b-utilities\") pod \"community-operators-6q6wr\" (UID: \"e67e431c-da77-437f-a3e7-1e6abb3ccc8b\") " pod="openshift-marketplace/community-operators-6q6wr" Oct 03 13:35:49 crc kubenswrapper[4861]: I1003 13:35:49.089570 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kqkn8\" (UniqueName: \"kubernetes.io/projected/e67e431c-da77-437f-a3e7-1e6abb3ccc8b-kube-api-access-kqkn8\") pod \"community-operators-6q6wr\" (UID: \"e67e431c-da77-437f-a3e7-1e6abb3ccc8b\") " pod="openshift-marketplace/community-operators-6q6wr" Oct 03 13:35:49 crc kubenswrapper[4861]: I1003 13:35:49.089636 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e67e431c-da77-437f-a3e7-1e6abb3ccc8b-catalog-content\") pod \"community-operators-6q6wr\" (UID: \"e67e431c-da77-437f-a3e7-1e6abb3ccc8b\") " pod="openshift-marketplace/community-operators-6q6wr" Oct 03 13:35:49 crc kubenswrapper[4861]: I1003 13:35:49.089681 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e67e431c-da77-437f-a3e7-1e6abb3ccc8b-utilities\") pod \"community-operators-6q6wr\" (UID: \"e67e431c-da77-437f-a3e7-1e6abb3ccc8b\") " pod="openshift-marketplace/community-operators-6q6wr" Oct 03 13:35:49 crc kubenswrapper[4861]: I1003 13:35:49.090103 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e67e431c-da77-437f-a3e7-1e6abb3ccc8b-utilities\") pod \"community-operators-6q6wr\" (UID: \"e67e431c-da77-437f-a3e7-1e6abb3ccc8b\") " pod="openshift-marketplace/community-operators-6q6wr" Oct 03 13:35:49 crc kubenswrapper[4861]: I1003 13:35:49.090379 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e67e431c-da77-437f-a3e7-1e6abb3ccc8b-catalog-content\") pod \"community-operators-6q6wr\" (UID: 
\"e67e431c-da77-437f-a3e7-1e6abb3ccc8b\") " pod="openshift-marketplace/community-operators-6q6wr" Oct 03 13:35:49 crc kubenswrapper[4861]: I1003 13:35:49.108752 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kqkn8\" (UniqueName: \"kubernetes.io/projected/e67e431c-da77-437f-a3e7-1e6abb3ccc8b-kube-api-access-kqkn8\") pod \"community-operators-6q6wr\" (UID: \"e67e431c-da77-437f-a3e7-1e6abb3ccc8b\") " pod="openshift-marketplace/community-operators-6q6wr" Oct 03 13:35:49 crc kubenswrapper[4861]: I1003 13:35:49.209540 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-6q6wr" Oct 03 13:35:49 crc kubenswrapper[4861]: I1003 13:35:49.395884 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-6q6wr"] Oct 03 13:35:49 crc kubenswrapper[4861]: I1003 13:35:49.754459 4861 generic.go:334] "Generic (PLEG): container finished" podID="e67e431c-da77-437f-a3e7-1e6abb3ccc8b" containerID="bac9d52e00fd4d74c865c7e09bc60ad4ac2c436a6247bd6b6fbeacb9591f0d3c" exitCode=0 Oct 03 13:35:49 crc kubenswrapper[4861]: I1003 13:35:49.754552 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6q6wr" event={"ID":"e67e431c-da77-437f-a3e7-1e6abb3ccc8b","Type":"ContainerDied","Data":"bac9d52e00fd4d74c865c7e09bc60ad4ac2c436a6247bd6b6fbeacb9591f0d3c"} Oct 03 13:35:49 crc kubenswrapper[4861]: I1003 13:35:49.756347 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6q6wr" event={"ID":"e67e431c-da77-437f-a3e7-1e6abb3ccc8b","Type":"ContainerStarted","Data":"2a16aeb8ceb3f791d024a0228c0630b3461283482400ce01d299a1c80117c2b0"} Oct 03 13:35:51 crc kubenswrapper[4861]: I1003 13:35:51.766893 4861 generic.go:334] "Generic (PLEG): container finished" podID="fe2ed840-3dd5-4276-b039-eec14967f4ee" containerID="19911dc61a58abfc3ce2e17e4a95cb85583a0007957d847a5c0496716c55be2c" exitCode=0 Oct 03 13:35:51 crc kubenswrapper[4861]: I1003 13:35:51.767078 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c5wgm" event={"ID":"fe2ed840-3dd5-4276-b039-eec14967f4ee","Type":"ContainerDied","Data":"19911dc61a58abfc3ce2e17e4a95cb85583a0007957d847a5c0496716c55be2c"} Oct 03 13:35:51 crc kubenswrapper[4861]: I1003 13:35:51.771476 4861 generic.go:334] "Generic (PLEG): container finished" podID="cded624e-5d0b-45da-9e1f-de6def114c00" containerID="da8021f18900989c533281e70baa8877d40679b19c38f3c465661301718268cb" exitCode=0 Oct 03 13:35:51 crc kubenswrapper[4861]: I1003 13:35:51.771532 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9nrfv" event={"ID":"cded624e-5d0b-45da-9e1f-de6def114c00","Type":"ContainerDied","Data":"da8021f18900989c533281e70baa8877d40679b19c38f3c465661301718268cb"} Oct 03 13:35:51 crc kubenswrapper[4861]: I1003 13:35:51.774523 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vl5pp" event={"ID":"d2c62b5f-5f8c-4245-90a3-fb06846e063b","Type":"ContainerStarted","Data":"66a069ed505fac1a382830e9b85dcbc27153a6c990ccb4ccaeef4700c2cee90e"} Oct 03 13:35:51 crc kubenswrapper[4861]: I1003 13:35:51.829611 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-vl5pp" podStartSLOduration=2.925132051 podStartE2EDuration="6.829581311s" podCreationTimestamp="2025-10-03 13:35:45 +0000 UTC" 
firstStartedPulling="2025-10-03 13:35:46.725989599 +0000 UTC m=+260.723974646" lastFinishedPulling="2025-10-03 13:35:50.630438859 +0000 UTC m=+264.628423906" observedRunningTime="2025-10-03 13:35:51.823733088 +0000 UTC m=+265.821718135" watchObservedRunningTime="2025-10-03 13:35:51.829581311 +0000 UTC m=+265.827566358" Oct 03 13:35:52 crc kubenswrapper[4861]: I1003 13:35:52.781933 4861 generic.go:334] "Generic (PLEG): container finished" podID="e67e431c-da77-437f-a3e7-1e6abb3ccc8b" containerID="f66b87636d2ef431cf3aeea00dcc916b6a4c0df2f35fc91f27036f4bb2f733dd" exitCode=0 Oct 03 13:35:52 crc kubenswrapper[4861]: I1003 13:35:52.782000 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6q6wr" event={"ID":"e67e431c-da77-437f-a3e7-1e6abb3ccc8b","Type":"ContainerDied","Data":"f66b87636d2ef431cf3aeea00dcc916b6a4c0df2f35fc91f27036f4bb2f733dd"} Oct 03 13:35:53 crc kubenswrapper[4861]: I1003 13:35:53.789513 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9nrfv" event={"ID":"cded624e-5d0b-45da-9e1f-de6def114c00","Type":"ContainerStarted","Data":"c92a24024326e00cf689a42a397ef0dfb651d0320f42fea4d7c6235efb11a059"} Oct 03 13:35:53 crc kubenswrapper[4861]: I1003 13:35:53.792515 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c5wgm" event={"ID":"fe2ed840-3dd5-4276-b039-eec14967f4ee","Type":"ContainerStarted","Data":"3049d9ba1c3f773e9815c91819a0b416101762aed9c430246e31215eb1778ca6"} Oct 03 13:35:53 crc kubenswrapper[4861]: I1003 13:35:53.807685 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-9nrfv" podStartSLOduration=2.71089638 podStartE2EDuration="6.807669005s" podCreationTimestamp="2025-10-03 13:35:47 +0000 UTC" firstStartedPulling="2025-10-03 13:35:48.739761807 +0000 UTC m=+262.737746854" lastFinishedPulling="2025-10-03 13:35:52.836534442 +0000 UTC m=+266.834519479" observedRunningTime="2025-10-03 13:35:53.80673215 +0000 UTC m=+267.804717197" watchObservedRunningTime="2025-10-03 13:35:53.807669005 +0000 UTC m=+267.805654052" Oct 03 13:35:53 crc kubenswrapper[4861]: I1003 13:35:53.835316 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-c5wgm" podStartSLOduration=2.844419731 podStartE2EDuration="7.835298344s" podCreationTimestamp="2025-10-03 13:35:46 +0000 UTC" firstStartedPulling="2025-10-03 13:35:47.76669767 +0000 UTC m=+261.764682717" lastFinishedPulling="2025-10-03 13:35:52.757576283 +0000 UTC m=+266.755561330" observedRunningTime="2025-10-03 13:35:53.833959077 +0000 UTC m=+267.831944124" watchObservedRunningTime="2025-10-03 13:35:53.835298344 +0000 UTC m=+267.833283391" Oct 03 13:35:55 crc kubenswrapper[4861]: I1003 13:35:55.805382 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6q6wr" event={"ID":"e67e431c-da77-437f-a3e7-1e6abb3ccc8b","Type":"ContainerStarted","Data":"2c1ded2b090e4b6fad0eee0352e4521d035625131b553508c2c4c7f2878b1252"} Oct 03 13:35:55 crc kubenswrapper[4861]: I1003 13:35:55.817397 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-vl5pp" Oct 03 13:35:55 crc kubenswrapper[4861]: I1003 13:35:55.817681 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-vl5pp" Oct 03 13:35:55 crc kubenswrapper[4861]: I1003 
13:35:55.830403 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-6q6wr" podStartSLOduration=4.053502995 podStartE2EDuration="7.830385023s" podCreationTimestamp="2025-10-03 13:35:48 +0000 UTC" firstStartedPulling="2025-10-03 13:35:50.763935206 +0000 UTC m=+264.761920253" lastFinishedPulling="2025-10-03 13:35:54.540817234 +0000 UTC m=+268.538802281" observedRunningTime="2025-10-03 13:35:55.826541976 +0000 UTC m=+269.824527023" watchObservedRunningTime="2025-10-03 13:35:55.830385023 +0000 UTC m=+269.828370070" Oct 03 13:35:55 crc kubenswrapper[4861]: I1003 13:35:55.854585 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-vl5pp" Oct 03 13:35:56 crc kubenswrapper[4861]: I1003 13:35:56.787944 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-c5wgm" Oct 03 13:35:56 crc kubenswrapper[4861]: I1003 13:35:56.788270 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-c5wgm" Oct 03 13:35:56 crc kubenswrapper[4861]: I1003 13:35:56.839376 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-c5wgm" Oct 03 13:35:56 crc kubenswrapper[4861]: I1003 13:35:56.851360 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-vl5pp" Oct 03 13:35:58 crc kubenswrapper[4861]: I1003 13:35:58.181851 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-9nrfv" Oct 03 13:35:58 crc kubenswrapper[4861]: I1003 13:35:58.181915 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-9nrfv" Oct 03 13:35:58 crc kubenswrapper[4861]: I1003 13:35:58.224257 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-9nrfv" Oct 03 13:35:58 crc kubenswrapper[4861]: I1003 13:35:58.864020 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-9nrfv" Oct 03 13:35:59 crc kubenswrapper[4861]: I1003 13:35:59.210506 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-6q6wr" Oct 03 13:35:59 crc kubenswrapper[4861]: I1003 13:35:59.210615 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-6q6wr" Oct 03 13:35:59 crc kubenswrapper[4861]: I1003 13:35:59.249738 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-6q6wr" Oct 03 13:36:04 crc kubenswrapper[4861]: I1003 13:36:04.375798 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-xv8s4" podUID="fdb216bf-a9cf-4f81-95ee-0424fc4bdc89" containerName="oauth-openshift" containerID="cri-o://57d3c5249df6882323924c5029b19e87f77757713781c3671eb64dd4a532dd2b" gracePeriod=15 Oct 03 13:36:05 crc kubenswrapper[4861]: I1003 13:36:05.871889 4861 generic.go:334] "Generic (PLEG): container finished" podID="fdb216bf-a9cf-4f81-95ee-0424fc4bdc89" containerID="57d3c5249df6882323924c5029b19e87f77757713781c3671eb64dd4a532dd2b" exitCode=0 Oct 03 13:36:05 crc kubenswrapper[4861]: I1003 
13:36:05.871987 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-xv8s4" event={"ID":"fdb216bf-a9cf-4f81-95ee-0424fc4bdc89","Type":"ContainerDied","Data":"57d3c5249df6882323924c5029b19e87f77757713781c3671eb64dd4a532dd2b"} Oct 03 13:36:05 crc kubenswrapper[4861]: I1003 13:36:05.902073 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-xv8s4" Oct 03 13:36:05 crc kubenswrapper[4861]: I1003 13:36:05.948689 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-6dc597b7cf-m9k67"] Oct 03 13:36:05 crc kubenswrapper[4861]: E1003 13:36:05.948932 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fdb216bf-a9cf-4f81-95ee-0424fc4bdc89" containerName="oauth-openshift" Oct 03 13:36:05 crc kubenswrapper[4861]: I1003 13:36:05.948945 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="fdb216bf-a9cf-4f81-95ee-0424fc4bdc89" containerName="oauth-openshift" Oct 03 13:36:05 crc kubenswrapper[4861]: I1003 13:36:05.949042 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="fdb216bf-a9cf-4f81-95ee-0424fc4bdc89" containerName="oauth-openshift" Oct 03 13:36:05 crc kubenswrapper[4861]: I1003 13:36:05.949515 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-6dc597b7cf-m9k67" Oct 03 13:36:05 crc kubenswrapper[4861]: I1003 13:36:05.951555 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-6dc597b7cf-m9k67"] Oct 03 13:36:06 crc kubenswrapper[4861]: I1003 13:36:06.017253 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/fdb216bf-a9cf-4f81-95ee-0424fc4bdc89-audit-policies\") pod \"fdb216bf-a9cf-4f81-95ee-0424fc4bdc89\" (UID: \"fdb216bf-a9cf-4f81-95ee-0424fc4bdc89\") " Oct 03 13:36:06 crc kubenswrapper[4861]: I1003 13:36:06.017325 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/fdb216bf-a9cf-4f81-95ee-0424fc4bdc89-v4-0-config-system-ocp-branding-template\") pod \"fdb216bf-a9cf-4f81-95ee-0424fc4bdc89\" (UID: \"fdb216bf-a9cf-4f81-95ee-0424fc4bdc89\") " Oct 03 13:36:06 crc kubenswrapper[4861]: I1003 13:36:06.017353 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/fdb216bf-a9cf-4f81-95ee-0424fc4bdc89-v4-0-config-system-session\") pod \"fdb216bf-a9cf-4f81-95ee-0424fc4bdc89\" (UID: \"fdb216bf-a9cf-4f81-95ee-0424fc4bdc89\") " Oct 03 13:36:06 crc kubenswrapper[4861]: I1003 13:36:06.017375 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/fdb216bf-a9cf-4f81-95ee-0424fc4bdc89-v4-0-config-system-trusted-ca-bundle\") pod \"fdb216bf-a9cf-4f81-95ee-0424fc4bdc89\" (UID: \"fdb216bf-a9cf-4f81-95ee-0424fc4bdc89\") " Oct 03 13:36:06 crc kubenswrapper[4861]: I1003 13:36:06.017429 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/fdb216bf-a9cf-4f81-95ee-0424fc4bdc89-v4-0-config-system-cliconfig\") pod \"fdb216bf-a9cf-4f81-95ee-0424fc4bdc89\" (UID: 
\"fdb216bf-a9cf-4f81-95ee-0424fc4bdc89\") " Oct 03 13:36:06 crc kubenswrapper[4861]: I1003 13:36:06.017495 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/fdb216bf-a9cf-4f81-95ee-0424fc4bdc89-v4-0-config-user-template-error\") pod \"fdb216bf-a9cf-4f81-95ee-0424fc4bdc89\" (UID: \"fdb216bf-a9cf-4f81-95ee-0424fc4bdc89\") " Oct 03 13:36:06 crc kubenswrapper[4861]: I1003 13:36:06.017529 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/fdb216bf-a9cf-4f81-95ee-0424fc4bdc89-v4-0-config-system-serving-cert\") pod \"fdb216bf-a9cf-4f81-95ee-0424fc4bdc89\" (UID: \"fdb216bf-a9cf-4f81-95ee-0424fc4bdc89\") " Oct 03 13:36:06 crc kubenswrapper[4861]: I1003 13:36:06.017554 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/fdb216bf-a9cf-4f81-95ee-0424fc4bdc89-v4-0-config-user-idp-0-file-data\") pod \"fdb216bf-a9cf-4f81-95ee-0424fc4bdc89\" (UID: \"fdb216bf-a9cf-4f81-95ee-0424fc4bdc89\") " Oct 03 13:36:06 crc kubenswrapper[4861]: I1003 13:36:06.017575 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/fdb216bf-a9cf-4f81-95ee-0424fc4bdc89-audit-dir\") pod \"fdb216bf-a9cf-4f81-95ee-0424fc4bdc89\" (UID: \"fdb216bf-a9cf-4f81-95ee-0424fc4bdc89\") " Oct 03 13:36:06 crc kubenswrapper[4861]: I1003 13:36:06.017608 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/fdb216bf-a9cf-4f81-95ee-0424fc4bdc89-v4-0-config-system-router-certs\") pod \"fdb216bf-a9cf-4f81-95ee-0424fc4bdc89\" (UID: \"fdb216bf-a9cf-4f81-95ee-0424fc4bdc89\") " Oct 03 13:36:06 crc kubenswrapper[4861]: I1003 13:36:06.017632 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gpnnd\" (UniqueName: \"kubernetes.io/projected/fdb216bf-a9cf-4f81-95ee-0424fc4bdc89-kube-api-access-gpnnd\") pod \"fdb216bf-a9cf-4f81-95ee-0424fc4bdc89\" (UID: \"fdb216bf-a9cf-4f81-95ee-0424fc4bdc89\") " Oct 03 13:36:06 crc kubenswrapper[4861]: I1003 13:36:06.017660 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/fdb216bf-a9cf-4f81-95ee-0424fc4bdc89-v4-0-config-user-template-provider-selection\") pod \"fdb216bf-a9cf-4f81-95ee-0424fc4bdc89\" (UID: \"fdb216bf-a9cf-4f81-95ee-0424fc4bdc89\") " Oct 03 13:36:06 crc kubenswrapper[4861]: I1003 13:36:06.017692 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/fdb216bf-a9cf-4f81-95ee-0424fc4bdc89-v4-0-config-user-template-login\") pod \"fdb216bf-a9cf-4f81-95ee-0424fc4bdc89\" (UID: \"fdb216bf-a9cf-4f81-95ee-0424fc4bdc89\") " Oct 03 13:36:06 crc kubenswrapper[4861]: I1003 13:36:06.017736 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/fdb216bf-a9cf-4f81-95ee-0424fc4bdc89-v4-0-config-system-service-ca\") pod \"fdb216bf-a9cf-4f81-95ee-0424fc4bdc89\" (UID: \"fdb216bf-a9cf-4f81-95ee-0424fc4bdc89\") " Oct 03 13:36:06 crc kubenswrapper[4861]: I1003 13:36:06.017924 4861 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/a80e642a-9b7b-4e12-a358-9010d98e1e82-v4-0-config-system-cliconfig\") pod \"oauth-openshift-6dc597b7cf-m9k67\" (UID: \"a80e642a-9b7b-4e12-a358-9010d98e1e82\") " pod="openshift-authentication/oauth-openshift-6dc597b7cf-m9k67" Oct 03 13:36:06 crc kubenswrapper[4861]: I1003 13:36:06.017956 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/a80e642a-9b7b-4e12-a358-9010d98e1e82-v4-0-config-system-service-ca\") pod \"oauth-openshift-6dc597b7cf-m9k67\" (UID: \"a80e642a-9b7b-4e12-a358-9010d98e1e82\") " pod="openshift-authentication/oauth-openshift-6dc597b7cf-m9k67" Oct 03 13:36:06 crc kubenswrapper[4861]: I1003 13:36:06.017979 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/a80e642a-9b7b-4e12-a358-9010d98e1e82-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-6dc597b7cf-m9k67\" (UID: \"a80e642a-9b7b-4e12-a358-9010d98e1e82\") " pod="openshift-authentication/oauth-openshift-6dc597b7cf-m9k67" Oct 03 13:36:06 crc kubenswrapper[4861]: I1003 13:36:06.018025 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/a80e642a-9b7b-4e12-a358-9010d98e1e82-v4-0-config-user-template-login\") pod \"oauth-openshift-6dc597b7cf-m9k67\" (UID: \"a80e642a-9b7b-4e12-a358-9010d98e1e82\") " pod="openshift-authentication/oauth-openshift-6dc597b7cf-m9k67" Oct 03 13:36:06 crc kubenswrapper[4861]: I1003 13:36:06.018047 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/a80e642a-9b7b-4e12-a358-9010d98e1e82-v4-0-config-user-template-error\") pod \"oauth-openshift-6dc597b7cf-m9k67\" (UID: \"a80e642a-9b7b-4e12-a358-9010d98e1e82\") " pod="openshift-authentication/oauth-openshift-6dc597b7cf-m9k67" Oct 03 13:36:06 crc kubenswrapper[4861]: I1003 13:36:06.018081 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/a80e642a-9b7b-4e12-a358-9010d98e1e82-v4-0-config-system-serving-cert\") pod \"oauth-openshift-6dc597b7cf-m9k67\" (UID: \"a80e642a-9b7b-4e12-a358-9010d98e1e82\") " pod="openshift-authentication/oauth-openshift-6dc597b7cf-m9k67" Oct 03 13:36:06 crc kubenswrapper[4861]: I1003 13:36:06.018103 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/a80e642a-9b7b-4e12-a358-9010d98e1e82-v4-0-config-system-session\") pod \"oauth-openshift-6dc597b7cf-m9k67\" (UID: \"a80e642a-9b7b-4e12-a358-9010d98e1e82\") " pod="openshift-authentication/oauth-openshift-6dc597b7cf-m9k67" Oct 03 13:36:06 crc kubenswrapper[4861]: I1003 13:36:06.018139 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/a80e642a-9b7b-4e12-a358-9010d98e1e82-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-6dc597b7cf-m9k67\" (UID: 
\"a80e642a-9b7b-4e12-a358-9010d98e1e82\") " pod="openshift-authentication/oauth-openshift-6dc597b7cf-m9k67" Oct 03 13:36:06 crc kubenswrapper[4861]: I1003 13:36:06.018157 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/a80e642a-9b7b-4e12-a358-9010d98e1e82-v4-0-config-system-router-certs\") pod \"oauth-openshift-6dc597b7cf-m9k67\" (UID: \"a80e642a-9b7b-4e12-a358-9010d98e1e82\") " pod="openshift-authentication/oauth-openshift-6dc597b7cf-m9k67" Oct 03 13:36:06 crc kubenswrapper[4861]: I1003 13:36:06.018175 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/a80e642a-9b7b-4e12-a358-9010d98e1e82-audit-dir\") pod \"oauth-openshift-6dc597b7cf-m9k67\" (UID: \"a80e642a-9b7b-4e12-a358-9010d98e1e82\") " pod="openshift-authentication/oauth-openshift-6dc597b7cf-m9k67" Oct 03 13:36:06 crc kubenswrapper[4861]: I1003 13:36:06.018200 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/a80e642a-9b7b-4e12-a358-9010d98e1e82-audit-policies\") pod \"oauth-openshift-6dc597b7cf-m9k67\" (UID: \"a80e642a-9b7b-4e12-a358-9010d98e1e82\") " pod="openshift-authentication/oauth-openshift-6dc597b7cf-m9k67" Oct 03 13:36:06 crc kubenswrapper[4861]: I1003 13:36:06.018223 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/a80e642a-9b7b-4e12-a358-9010d98e1e82-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-6dc597b7cf-m9k67\" (UID: \"a80e642a-9b7b-4e12-a358-9010d98e1e82\") " pod="openshift-authentication/oauth-openshift-6dc597b7cf-m9k67" Oct 03 13:36:06 crc kubenswrapper[4861]: I1003 13:36:06.018273 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a80e642a-9b7b-4e12-a358-9010d98e1e82-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-6dc597b7cf-m9k67\" (UID: \"a80e642a-9b7b-4e12-a358-9010d98e1e82\") " pod="openshift-authentication/oauth-openshift-6dc597b7cf-m9k67" Oct 03 13:36:06 crc kubenswrapper[4861]: I1003 13:36:06.018298 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j42zf\" (UniqueName: \"kubernetes.io/projected/a80e642a-9b7b-4e12-a358-9010d98e1e82-kube-api-access-j42zf\") pod \"oauth-openshift-6dc597b7cf-m9k67\" (UID: \"a80e642a-9b7b-4e12-a358-9010d98e1e82\") " pod="openshift-authentication/oauth-openshift-6dc597b7cf-m9k67" Oct 03 13:36:06 crc kubenswrapper[4861]: I1003 13:36:06.018957 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/fdb216bf-a9cf-4f81-95ee-0424fc4bdc89-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "fdb216bf-a9cf-4f81-95ee-0424fc4bdc89" (UID: "fdb216bf-a9cf-4f81-95ee-0424fc4bdc89"). InnerVolumeSpecName "audit-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 03 13:36:06 crc kubenswrapper[4861]: I1003 13:36:06.020108 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fdb216bf-a9cf-4f81-95ee-0424fc4bdc89-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "fdb216bf-a9cf-4f81-95ee-0424fc4bdc89" (UID: "fdb216bf-a9cf-4f81-95ee-0424fc4bdc89"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:36:06 crc kubenswrapper[4861]: I1003 13:36:06.020156 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fdb216bf-a9cf-4f81-95ee-0424fc4bdc89-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "fdb216bf-a9cf-4f81-95ee-0424fc4bdc89" (UID: "fdb216bf-a9cf-4f81-95ee-0424fc4bdc89"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:36:06 crc kubenswrapper[4861]: I1003 13:36:06.020168 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fdb216bf-a9cf-4f81-95ee-0424fc4bdc89-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "fdb216bf-a9cf-4f81-95ee-0424fc4bdc89" (UID: "fdb216bf-a9cf-4f81-95ee-0424fc4bdc89"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:36:06 crc kubenswrapper[4861]: I1003 13:36:06.020609 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fdb216bf-a9cf-4f81-95ee-0424fc4bdc89-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "fdb216bf-a9cf-4f81-95ee-0424fc4bdc89" (UID: "fdb216bf-a9cf-4f81-95ee-0424fc4bdc89"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:36:06 crc kubenswrapper[4861]: I1003 13:36:06.025003 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fdb216bf-a9cf-4f81-95ee-0424fc4bdc89-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "fdb216bf-a9cf-4f81-95ee-0424fc4bdc89" (UID: "fdb216bf-a9cf-4f81-95ee-0424fc4bdc89"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:36:06 crc kubenswrapper[4861]: I1003 13:36:06.025574 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fdb216bf-a9cf-4f81-95ee-0424fc4bdc89-kube-api-access-gpnnd" (OuterVolumeSpecName: "kube-api-access-gpnnd") pod "fdb216bf-a9cf-4f81-95ee-0424fc4bdc89" (UID: "fdb216bf-a9cf-4f81-95ee-0424fc4bdc89"). InnerVolumeSpecName "kube-api-access-gpnnd". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:36:06 crc kubenswrapper[4861]: I1003 13:36:06.025690 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fdb216bf-a9cf-4f81-95ee-0424fc4bdc89-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "fdb216bf-a9cf-4f81-95ee-0424fc4bdc89" (UID: "fdb216bf-a9cf-4f81-95ee-0424fc4bdc89"). InnerVolumeSpecName "v4-0-config-user-template-error". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:36:06 crc kubenswrapper[4861]: I1003 13:36:06.026077 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fdb216bf-a9cf-4f81-95ee-0424fc4bdc89-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "fdb216bf-a9cf-4f81-95ee-0424fc4bdc89" (UID: "fdb216bf-a9cf-4f81-95ee-0424fc4bdc89"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:36:06 crc kubenswrapper[4861]: I1003 13:36:06.026409 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fdb216bf-a9cf-4f81-95ee-0424fc4bdc89-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "fdb216bf-a9cf-4f81-95ee-0424fc4bdc89" (UID: "fdb216bf-a9cf-4f81-95ee-0424fc4bdc89"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:36:06 crc kubenswrapper[4861]: I1003 13:36:06.026867 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fdb216bf-a9cf-4f81-95ee-0424fc4bdc89-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "fdb216bf-a9cf-4f81-95ee-0424fc4bdc89" (UID: "fdb216bf-a9cf-4f81-95ee-0424fc4bdc89"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:36:06 crc kubenswrapper[4861]: I1003 13:36:06.027042 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fdb216bf-a9cf-4f81-95ee-0424fc4bdc89-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "fdb216bf-a9cf-4f81-95ee-0424fc4bdc89" (UID: "fdb216bf-a9cf-4f81-95ee-0424fc4bdc89"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:36:06 crc kubenswrapper[4861]: I1003 13:36:06.031335 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fdb216bf-a9cf-4f81-95ee-0424fc4bdc89-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "fdb216bf-a9cf-4f81-95ee-0424fc4bdc89" (UID: "fdb216bf-a9cf-4f81-95ee-0424fc4bdc89"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:36:06 crc kubenswrapper[4861]: I1003 13:36:06.044470 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fdb216bf-a9cf-4f81-95ee-0424fc4bdc89-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "fdb216bf-a9cf-4f81-95ee-0424fc4bdc89" (UID: "fdb216bf-a9cf-4f81-95ee-0424fc4bdc89"). InnerVolumeSpecName "v4-0-config-system-session". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:36:06 crc kubenswrapper[4861]: I1003 13:36:06.119687 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/a80e642a-9b7b-4e12-a358-9010d98e1e82-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-6dc597b7cf-m9k67\" (UID: \"a80e642a-9b7b-4e12-a358-9010d98e1e82\") " pod="openshift-authentication/oauth-openshift-6dc597b7cf-m9k67" Oct 03 13:36:06 crc kubenswrapper[4861]: I1003 13:36:06.119746 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/a80e642a-9b7b-4e12-a358-9010d98e1e82-v4-0-config-system-router-certs\") pod \"oauth-openshift-6dc597b7cf-m9k67\" (UID: \"a80e642a-9b7b-4e12-a358-9010d98e1e82\") " pod="openshift-authentication/oauth-openshift-6dc597b7cf-m9k67" Oct 03 13:36:06 crc kubenswrapper[4861]: I1003 13:36:06.119775 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/a80e642a-9b7b-4e12-a358-9010d98e1e82-audit-dir\") pod \"oauth-openshift-6dc597b7cf-m9k67\" (UID: \"a80e642a-9b7b-4e12-a358-9010d98e1e82\") " pod="openshift-authentication/oauth-openshift-6dc597b7cf-m9k67" Oct 03 13:36:06 crc kubenswrapper[4861]: I1003 13:36:06.119796 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/a80e642a-9b7b-4e12-a358-9010d98e1e82-audit-policies\") pod \"oauth-openshift-6dc597b7cf-m9k67\" (UID: \"a80e642a-9b7b-4e12-a358-9010d98e1e82\") " pod="openshift-authentication/oauth-openshift-6dc597b7cf-m9k67" Oct 03 13:36:06 crc kubenswrapper[4861]: I1003 13:36:06.119811 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/a80e642a-9b7b-4e12-a358-9010d98e1e82-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-6dc597b7cf-m9k67\" (UID: \"a80e642a-9b7b-4e12-a358-9010d98e1e82\") " pod="openshift-authentication/oauth-openshift-6dc597b7cf-m9k67" Oct 03 13:36:06 crc kubenswrapper[4861]: I1003 13:36:06.119831 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a80e642a-9b7b-4e12-a358-9010d98e1e82-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-6dc597b7cf-m9k67\" (UID: \"a80e642a-9b7b-4e12-a358-9010d98e1e82\") " pod="openshift-authentication/oauth-openshift-6dc597b7cf-m9k67" Oct 03 13:36:06 crc kubenswrapper[4861]: I1003 13:36:06.119850 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j42zf\" (UniqueName: \"kubernetes.io/projected/a80e642a-9b7b-4e12-a358-9010d98e1e82-kube-api-access-j42zf\") pod \"oauth-openshift-6dc597b7cf-m9k67\" (UID: \"a80e642a-9b7b-4e12-a358-9010d98e1e82\") " pod="openshift-authentication/oauth-openshift-6dc597b7cf-m9k67" Oct 03 13:36:06 crc kubenswrapper[4861]: I1003 13:36:06.119898 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/a80e642a-9b7b-4e12-a358-9010d98e1e82-v4-0-config-system-cliconfig\") pod \"oauth-openshift-6dc597b7cf-m9k67\" (UID: \"a80e642a-9b7b-4e12-a358-9010d98e1e82\") " pod="openshift-authentication/oauth-openshift-6dc597b7cf-m9k67" Oct 03 13:36:06 
crc kubenswrapper[4861]: I1003 13:36:06.119916 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/a80e642a-9b7b-4e12-a358-9010d98e1e82-v4-0-config-system-service-ca\") pod \"oauth-openshift-6dc597b7cf-m9k67\" (UID: \"a80e642a-9b7b-4e12-a358-9010d98e1e82\") " pod="openshift-authentication/oauth-openshift-6dc597b7cf-m9k67" Oct 03 13:36:06 crc kubenswrapper[4861]: I1003 13:36:06.119932 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/a80e642a-9b7b-4e12-a358-9010d98e1e82-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-6dc597b7cf-m9k67\" (UID: \"a80e642a-9b7b-4e12-a358-9010d98e1e82\") " pod="openshift-authentication/oauth-openshift-6dc597b7cf-m9k67" Oct 03 13:36:06 crc kubenswrapper[4861]: I1003 13:36:06.119969 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/a80e642a-9b7b-4e12-a358-9010d98e1e82-v4-0-config-user-template-login\") pod \"oauth-openshift-6dc597b7cf-m9k67\" (UID: \"a80e642a-9b7b-4e12-a358-9010d98e1e82\") " pod="openshift-authentication/oauth-openshift-6dc597b7cf-m9k67" Oct 03 13:36:06 crc kubenswrapper[4861]: I1003 13:36:06.119985 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/a80e642a-9b7b-4e12-a358-9010d98e1e82-v4-0-config-user-template-error\") pod \"oauth-openshift-6dc597b7cf-m9k67\" (UID: \"a80e642a-9b7b-4e12-a358-9010d98e1e82\") " pod="openshift-authentication/oauth-openshift-6dc597b7cf-m9k67" Oct 03 13:36:06 crc kubenswrapper[4861]: I1003 13:36:06.120011 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/a80e642a-9b7b-4e12-a358-9010d98e1e82-v4-0-config-system-serving-cert\") pod \"oauth-openshift-6dc597b7cf-m9k67\" (UID: \"a80e642a-9b7b-4e12-a358-9010d98e1e82\") " pod="openshift-authentication/oauth-openshift-6dc597b7cf-m9k67" Oct 03 13:36:06 crc kubenswrapper[4861]: I1003 13:36:06.120029 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/a80e642a-9b7b-4e12-a358-9010d98e1e82-v4-0-config-system-session\") pod \"oauth-openshift-6dc597b7cf-m9k67\" (UID: \"a80e642a-9b7b-4e12-a358-9010d98e1e82\") " pod="openshift-authentication/oauth-openshift-6dc597b7cf-m9k67" Oct 03 13:36:06 crc kubenswrapper[4861]: I1003 13:36:06.120070 4861 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/fdb216bf-a9cf-4f81-95ee-0424fc4bdc89-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Oct 03 13:36:06 crc kubenswrapper[4861]: I1003 13:36:06.120082 4861 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/fdb216bf-a9cf-4f81-95ee-0424fc4bdc89-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Oct 03 13:36:06 crc kubenswrapper[4861]: I1003 13:36:06.120090 4861 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/fdb216bf-a9cf-4f81-95ee-0424fc4bdc89-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath 
\"\"" Oct 03 13:36:06 crc kubenswrapper[4861]: I1003 13:36:06.120100 4861 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/fdb216bf-a9cf-4f81-95ee-0424fc4bdc89-audit-dir\") on node \"crc\" DevicePath \"\"" Oct 03 13:36:06 crc kubenswrapper[4861]: I1003 13:36:06.120110 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gpnnd\" (UniqueName: \"kubernetes.io/projected/fdb216bf-a9cf-4f81-95ee-0424fc4bdc89-kube-api-access-gpnnd\") on node \"crc\" DevicePath \"\"" Oct 03 13:36:06 crc kubenswrapper[4861]: I1003 13:36:06.120118 4861 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/fdb216bf-a9cf-4f81-95ee-0424fc4bdc89-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Oct 03 13:36:06 crc kubenswrapper[4861]: I1003 13:36:06.120128 4861 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/fdb216bf-a9cf-4f81-95ee-0424fc4bdc89-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Oct 03 13:36:06 crc kubenswrapper[4861]: I1003 13:36:06.120138 4861 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/fdb216bf-a9cf-4f81-95ee-0424fc4bdc89-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Oct 03 13:36:06 crc kubenswrapper[4861]: I1003 13:36:06.120147 4861 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/fdb216bf-a9cf-4f81-95ee-0424fc4bdc89-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Oct 03 13:36:06 crc kubenswrapper[4861]: I1003 13:36:06.120157 4861 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/fdb216bf-a9cf-4f81-95ee-0424fc4bdc89-audit-policies\") on node \"crc\" DevicePath \"\"" Oct 03 13:36:06 crc kubenswrapper[4861]: I1003 13:36:06.120165 4861 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/fdb216bf-a9cf-4f81-95ee-0424fc4bdc89-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Oct 03 13:36:06 crc kubenswrapper[4861]: I1003 13:36:06.120174 4861 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/fdb216bf-a9cf-4f81-95ee-0424fc4bdc89-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Oct 03 13:36:06 crc kubenswrapper[4861]: I1003 13:36:06.120183 4861 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/fdb216bf-a9cf-4f81-95ee-0424fc4bdc89-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 13:36:06 crc kubenswrapper[4861]: I1003 13:36:06.120192 4861 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/fdb216bf-a9cf-4f81-95ee-0424fc4bdc89-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Oct 03 13:36:06 crc kubenswrapper[4861]: I1003 13:36:06.122392 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/a80e642a-9b7b-4e12-a358-9010d98e1e82-v4-0-config-system-service-ca\") pod 
\"oauth-openshift-6dc597b7cf-m9k67\" (UID: \"a80e642a-9b7b-4e12-a358-9010d98e1e82\") " pod="openshift-authentication/oauth-openshift-6dc597b7cf-m9k67" Oct 03 13:36:06 crc kubenswrapper[4861]: I1003 13:36:06.122983 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/a80e642a-9b7b-4e12-a358-9010d98e1e82-v4-0-config-system-cliconfig\") pod \"oauth-openshift-6dc597b7cf-m9k67\" (UID: \"a80e642a-9b7b-4e12-a358-9010d98e1e82\") " pod="openshift-authentication/oauth-openshift-6dc597b7cf-m9k67" Oct 03 13:36:06 crc kubenswrapper[4861]: I1003 13:36:06.123056 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/a80e642a-9b7b-4e12-a358-9010d98e1e82-audit-dir\") pod \"oauth-openshift-6dc597b7cf-m9k67\" (UID: \"a80e642a-9b7b-4e12-a358-9010d98e1e82\") " pod="openshift-authentication/oauth-openshift-6dc597b7cf-m9k67" Oct 03 13:36:06 crc kubenswrapper[4861]: I1003 13:36:06.123219 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/a80e642a-9b7b-4e12-a358-9010d98e1e82-audit-policies\") pod \"oauth-openshift-6dc597b7cf-m9k67\" (UID: \"a80e642a-9b7b-4e12-a358-9010d98e1e82\") " pod="openshift-authentication/oauth-openshift-6dc597b7cf-m9k67" Oct 03 13:36:06 crc kubenswrapper[4861]: I1003 13:36:06.124177 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a80e642a-9b7b-4e12-a358-9010d98e1e82-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-6dc597b7cf-m9k67\" (UID: \"a80e642a-9b7b-4e12-a358-9010d98e1e82\") " pod="openshift-authentication/oauth-openshift-6dc597b7cf-m9k67" Oct 03 13:36:06 crc kubenswrapper[4861]: I1003 13:36:06.126647 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/a80e642a-9b7b-4e12-a358-9010d98e1e82-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-6dc597b7cf-m9k67\" (UID: \"a80e642a-9b7b-4e12-a358-9010d98e1e82\") " pod="openshift-authentication/oauth-openshift-6dc597b7cf-m9k67" Oct 03 13:36:06 crc kubenswrapper[4861]: I1003 13:36:06.127194 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/a80e642a-9b7b-4e12-a358-9010d98e1e82-v4-0-config-user-template-error\") pod \"oauth-openshift-6dc597b7cf-m9k67\" (UID: \"a80e642a-9b7b-4e12-a358-9010d98e1e82\") " pod="openshift-authentication/oauth-openshift-6dc597b7cf-m9k67" Oct 03 13:36:06 crc kubenswrapper[4861]: I1003 13:36:06.128344 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/a80e642a-9b7b-4e12-a358-9010d98e1e82-v4-0-config-user-template-login\") pod \"oauth-openshift-6dc597b7cf-m9k67\" (UID: \"a80e642a-9b7b-4e12-a358-9010d98e1e82\") " pod="openshift-authentication/oauth-openshift-6dc597b7cf-m9k67" Oct 03 13:36:06 crc kubenswrapper[4861]: I1003 13:36:06.128762 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/a80e642a-9b7b-4e12-a358-9010d98e1e82-v4-0-config-system-router-certs\") pod \"oauth-openshift-6dc597b7cf-m9k67\" (UID: \"a80e642a-9b7b-4e12-a358-9010d98e1e82\") " 
pod="openshift-authentication/oauth-openshift-6dc597b7cf-m9k67"
Oct 03 13:36:06 crc kubenswrapper[4861]: I1003 13:36:06.129513 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/a80e642a-9b7b-4e12-a358-9010d98e1e82-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-6dc597b7cf-m9k67\" (UID: \"a80e642a-9b7b-4e12-a358-9010d98e1e82\") " pod="openshift-authentication/oauth-openshift-6dc597b7cf-m9k67"
Oct 03 13:36:06 crc kubenswrapper[4861]: I1003 13:36:06.132856 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/a80e642a-9b7b-4e12-a358-9010d98e1e82-v4-0-config-system-session\") pod \"oauth-openshift-6dc597b7cf-m9k67\" (UID: \"a80e642a-9b7b-4e12-a358-9010d98e1e82\") " pod="openshift-authentication/oauth-openshift-6dc597b7cf-m9k67"
Oct 03 13:36:06 crc kubenswrapper[4861]: I1003 13:36:06.135041 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/a80e642a-9b7b-4e12-a358-9010d98e1e82-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-6dc597b7cf-m9k67\" (UID: \"a80e642a-9b7b-4e12-a358-9010d98e1e82\") " pod="openshift-authentication/oauth-openshift-6dc597b7cf-m9k67"
Oct 03 13:36:06 crc kubenswrapper[4861]: I1003 13:36:06.135810 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/a80e642a-9b7b-4e12-a358-9010d98e1e82-v4-0-config-system-serving-cert\") pod \"oauth-openshift-6dc597b7cf-m9k67\" (UID: \"a80e642a-9b7b-4e12-a358-9010d98e1e82\") " pod="openshift-authentication/oauth-openshift-6dc597b7cf-m9k67"
Oct 03 13:36:06 crc kubenswrapper[4861]: I1003 13:36:06.137009 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j42zf\" (UniqueName: \"kubernetes.io/projected/a80e642a-9b7b-4e12-a358-9010d98e1e82-kube-api-access-j42zf\") pod \"oauth-openshift-6dc597b7cf-m9k67\" (UID: \"a80e642a-9b7b-4e12-a358-9010d98e1e82\") " pod="openshift-authentication/oauth-openshift-6dc597b7cf-m9k67"
Oct 03 13:36:06 crc kubenswrapper[4861]: I1003 13:36:06.268859 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-6dc597b7cf-m9k67"
Oct 03 13:36:06 crc kubenswrapper[4861]: I1003 13:36:06.455042 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-6dc597b7cf-m9k67"]
Oct 03 13:36:06 crc kubenswrapper[4861]: W1003 13:36:06.460304 4861 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda80e642a_9b7b_4e12_a358_9010d98e1e82.slice/crio-91463c8f5a7651b3b8328b8d915350127f0f8b47abb8b99f82e19d415aed9449 WatchSource:0}: Error finding container 91463c8f5a7651b3b8328b8d915350127f0f8b47abb8b99f82e19d415aed9449: Status 404 returned error can't find the container with id 91463c8f5a7651b3b8328b8d915350127f0f8b47abb8b99f82e19d415aed9449
Oct 03 13:36:06 crc kubenswrapper[4861]: I1003 13:36:06.834169 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-c5wgm"
Oct 03 13:36:06 crc kubenswrapper[4861]: I1003 13:36:06.880448 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-6dc597b7cf-m9k67" event={"ID":"a80e642a-9b7b-4e12-a358-9010d98e1e82","Type":"ContainerStarted","Data":"58e618f74e1b3a259748f90a0a4691aef5ae2cd65d65b89f1714bec868be9c91"}
Oct 03 13:36:06 crc kubenswrapper[4861]: I1003 13:36:06.881543 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-6dc597b7cf-m9k67" event={"ID":"a80e642a-9b7b-4e12-a358-9010d98e1e82","Type":"ContainerStarted","Data":"91463c8f5a7651b3b8328b8d915350127f0f8b47abb8b99f82e19d415aed9449"}
Oct 03 13:36:06 crc kubenswrapper[4861]: I1003 13:36:06.881643 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-6dc597b7cf-m9k67"
Oct 03 13:36:06 crc kubenswrapper[4861]: I1003 13:36:06.884993 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-xv8s4" event={"ID":"fdb216bf-a9cf-4f81-95ee-0424fc4bdc89","Type":"ContainerDied","Data":"b687c2bb1fe6c4730f8e3eb0f6bbd2458756de3f9e608fa4b47b572d5a15336c"}
Oct 03 13:36:06 crc kubenswrapper[4861]: I1003 13:36:06.885278 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-xv8s4"
Oct 03 13:36:06 crc kubenswrapper[4861]: I1003 13:36:06.885706 4861 scope.go:117] "RemoveContainer" containerID="57d3c5249df6882323924c5029b19e87f77757713781c3671eb64dd4a532dd2b"
Oct 03 13:36:06 crc kubenswrapper[4861]: I1003 13:36:06.914010 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-6dc597b7cf-m9k67" podStartSLOduration=27.913994013 podStartE2EDuration="27.913994013s" podCreationTimestamp="2025-10-03 13:35:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:36:06.91100427 +0000 UTC m=+280.908989317" watchObservedRunningTime="2025-10-03 13:36:06.913994013 +0000 UTC m=+280.911979080"
Oct 03 13:36:06 crc kubenswrapper[4861]: I1003 13:36:06.937643 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-xv8s4"]
Oct 03 13:36:06 crc kubenswrapper[4861]: I1003 13:36:06.942009 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-xv8s4"]
Oct 03 13:36:07 crc kubenswrapper[4861]: I1003 13:36:07.267127 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-6dc597b7cf-m9k67"
Oct 03 13:36:08 crc kubenswrapper[4861]: I1003 13:36:08.695656 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fdb216bf-a9cf-4f81-95ee-0424fc4bdc89" path="/var/lib/kubelet/pods/fdb216bf-a9cf-4f81-95ee-0424fc4bdc89/volumes"
Oct 03 13:36:09 crc kubenswrapper[4861]: I1003 13:36:09.259815 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-6q6wr"
Oct 03 13:37:00 crc kubenswrapper[4861]: I1003 13:37:00.145090 4861 patch_prober.go:28] interesting pod/machine-config-daemon-t9slw container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 03 13:37:00 crc kubenswrapper[4861]: I1003 13:37:00.145687 4861 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 03 13:37:30 crc kubenswrapper[4861]: I1003 13:37:30.144924 4861 patch_prober.go:28] interesting pod/machine-config-daemon-t9slw container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 03 13:37:30 crc kubenswrapper[4861]: I1003 13:37:30.145729 4861 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 03 13:38:00 crc kubenswrapper[4861]: I1003 13:38:00.145947 4861 patch_prober.go:28] interesting pod/machine-config-daemon-t9slw container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 03 13:38:00 crc kubenswrapper[4861]: I1003 13:38:00.146554 4861 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 03 13:38:00 crc kubenswrapper[4861]: I1003 13:38:00.146602 4861 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-t9slw"
Oct 03 13:38:00 crc kubenswrapper[4861]: I1003 13:38:00.147155 4861 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"58a55ca6ca92c0434beafec071a82b4a4f4ceddc00d7c2e4599653cb06f6f65e"} pod="openshift-machine-config-operator/machine-config-daemon-t9slw" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Oct 03 13:38:00 crc kubenswrapper[4861]: I1003 13:38:00.147204 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" containerName="machine-config-daemon" containerID="cri-o://58a55ca6ca92c0434beafec071a82b4a4f4ceddc00d7c2e4599653cb06f6f65e" gracePeriod=600
Oct 03 13:38:00 crc kubenswrapper[4861]: I1003 13:38:00.452610 4861 generic.go:334] "Generic (PLEG): container finished" podID="d8335d3f-417e-4114-b306-a3d8f6c31348" containerID="58a55ca6ca92c0434beafec071a82b4a4f4ceddc00d7c2e4599653cb06f6f65e" exitCode=0
Oct 03 13:38:00 crc kubenswrapper[4861]: I1003 13:38:00.452821 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" event={"ID":"d8335d3f-417e-4114-b306-a3d8f6c31348","Type":"ContainerDied","Data":"58a55ca6ca92c0434beafec071a82b4a4f4ceddc00d7c2e4599653cb06f6f65e"}
Oct 03 13:38:00 crc kubenswrapper[4861]: I1003 13:38:00.452954 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" event={"ID":"d8335d3f-417e-4114-b306-a3d8f6c31348","Type":"ContainerStarted","Data":"e480ef4df69d33dab6234057237660f52eab1528687ed0bfdbfbb9854b248c93"}
Oct 03 13:38:00 crc kubenswrapper[4861]: I1003 13:38:00.452976 4861 scope.go:117] "RemoveContainer" containerID="871a1c47b73846e3f28db33691e75b5ed73af7287e81dae4cf2134fd827614b4"
Oct 03 13:38:28 crc kubenswrapper[4861]: I1003 13:38:28.714883 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-lbhzn"]
Oct 03 13:38:28 crc kubenswrapper[4861]: I1003 13:38:28.716212 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-lbhzn"
Oct 03 13:38:28 crc kubenswrapper[4861]: I1003 13:38:28.734412 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-lbhzn"]
Oct 03 13:38:28 crc kubenswrapper[4861]: I1003 13:38:28.862452 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/cea5a4f9-7f99-4691-88aa-220aa55c5d9e-trusted-ca\") pod \"image-registry-66df7c8f76-lbhzn\" (UID: \"cea5a4f9-7f99-4691-88aa-220aa55c5d9e\") " pod="openshift-image-registry/image-registry-66df7c8f76-lbhzn"
Oct 03 13:38:28 crc kubenswrapper[4861]: I1003 13:38:28.862540 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/cea5a4f9-7f99-4691-88aa-220aa55c5d9e-registry-tls\") pod \"image-registry-66df7c8f76-lbhzn\" (UID: \"cea5a4f9-7f99-4691-88aa-220aa55c5d9e\") " pod="openshift-image-registry/image-registry-66df7c8f76-lbhzn"
Oct 03 13:38:28 crc kubenswrapper[4861]: I1003 13:38:28.862706 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/cea5a4f9-7f99-4691-88aa-220aa55c5d9e-ca-trust-extracted\") pod \"image-registry-66df7c8f76-lbhzn\" (UID: \"cea5a4f9-7f99-4691-88aa-220aa55c5d9e\") " pod="openshift-image-registry/image-registry-66df7c8f76-lbhzn"
Oct 03 13:38:28 crc kubenswrapper[4861]: I1003 13:38:28.862826 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/cea5a4f9-7f99-4691-88aa-220aa55c5d9e-registry-certificates\") pod \"image-registry-66df7c8f76-lbhzn\" (UID: \"cea5a4f9-7f99-4691-88aa-220aa55c5d9e\") " pod="openshift-image-registry/image-registry-66df7c8f76-lbhzn"
Oct 03 13:38:28 crc kubenswrapper[4861]: I1003 13:38:28.862894 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f4pdv\" (UniqueName: \"kubernetes.io/projected/cea5a4f9-7f99-4691-88aa-220aa55c5d9e-kube-api-access-f4pdv\") pod \"image-registry-66df7c8f76-lbhzn\" (UID: \"cea5a4f9-7f99-4691-88aa-220aa55c5d9e\") " pod="openshift-image-registry/image-registry-66df7c8f76-lbhzn"
Oct 03 13:38:28 crc kubenswrapper[4861]: I1003 13:38:28.862969 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-lbhzn\" (UID: \"cea5a4f9-7f99-4691-88aa-220aa55c5d9e\") " pod="openshift-image-registry/image-registry-66df7c8f76-lbhzn"
Oct 03 13:38:28 crc kubenswrapper[4861]: I1003 13:38:28.863027 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/cea5a4f9-7f99-4691-88aa-220aa55c5d9e-bound-sa-token\") pod \"image-registry-66df7c8f76-lbhzn\" (UID: \"cea5a4f9-7f99-4691-88aa-220aa55c5d9e\") " pod="openshift-image-registry/image-registry-66df7c8f76-lbhzn"
Oct 03 13:38:28 crc kubenswrapper[4861]: I1003 13:38:28.863064 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/cea5a4f9-7f99-4691-88aa-220aa55c5d9e-installation-pull-secrets\") pod \"image-registry-66df7c8f76-lbhzn\" (UID: \"cea5a4f9-7f99-4691-88aa-220aa55c5d9e\") " pod="openshift-image-registry/image-registry-66df7c8f76-lbhzn"
Oct 03 13:38:28 crc kubenswrapper[4861]: I1003 13:38:28.886448 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-lbhzn\" (UID: \"cea5a4f9-7f99-4691-88aa-220aa55c5d9e\") " pod="openshift-image-registry/image-registry-66df7c8f76-lbhzn"
Oct 03 13:38:28 crc kubenswrapper[4861]: I1003 13:38:28.964523 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/cea5a4f9-7f99-4691-88aa-220aa55c5d9e-ca-trust-extracted\") pod \"image-registry-66df7c8f76-lbhzn\" (UID: \"cea5a4f9-7f99-4691-88aa-220aa55c5d9e\") " pod="openshift-image-registry/image-registry-66df7c8f76-lbhzn"
Oct 03 13:38:28 crc kubenswrapper[4861]: I1003 13:38:28.964915 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/cea5a4f9-7f99-4691-88aa-220aa55c5d9e-registry-certificates\") pod \"image-registry-66df7c8f76-lbhzn\" (UID: \"cea5a4f9-7f99-4691-88aa-220aa55c5d9e\") " pod="openshift-image-registry/image-registry-66df7c8f76-lbhzn"
Oct 03 13:38:28 crc kubenswrapper[4861]: I1003 13:38:28.964965 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f4pdv\" (UniqueName: \"kubernetes.io/projected/cea5a4f9-7f99-4691-88aa-220aa55c5d9e-kube-api-access-f4pdv\") pod \"image-registry-66df7c8f76-lbhzn\" (UID: \"cea5a4f9-7f99-4691-88aa-220aa55c5d9e\") " pod="openshift-image-registry/image-registry-66df7c8f76-lbhzn"
Oct 03 13:38:28 crc kubenswrapper[4861]: I1003 13:38:28.964991 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/cea5a4f9-7f99-4691-88aa-220aa55c5d9e-bound-sa-token\") pod \"image-registry-66df7c8f76-lbhzn\" (UID: \"cea5a4f9-7f99-4691-88aa-220aa55c5d9e\") " pod="openshift-image-registry/image-registry-66df7c8f76-lbhzn"
Oct 03 13:38:28 crc kubenswrapper[4861]: I1003 13:38:28.965009 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/cea5a4f9-7f99-4691-88aa-220aa55c5d9e-installation-pull-secrets\") pod \"image-registry-66df7c8f76-lbhzn\" (UID: \"cea5a4f9-7f99-4691-88aa-220aa55c5d9e\") " pod="openshift-image-registry/image-registry-66df7c8f76-lbhzn"
Oct 03 13:38:28 crc kubenswrapper[4861]: I1003 13:38:28.965039 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/cea5a4f9-7f99-4691-88aa-220aa55c5d9e-trusted-ca\") pod \"image-registry-66df7c8f76-lbhzn\" (UID: \"cea5a4f9-7f99-4691-88aa-220aa55c5d9e\") " pod="openshift-image-registry/image-registry-66df7c8f76-lbhzn"
Oct 03 13:38:28 crc kubenswrapper[4861]: I1003 13:38:28.966309 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/cea5a4f9-7f99-4691-88aa-220aa55c5d9e-registry-tls\") pod \"image-registry-66df7c8f76-lbhzn\" (UID: \"cea5a4f9-7f99-4691-88aa-220aa55c5d9e\") " pod="openshift-image-registry/image-registry-66df7c8f76-lbhzn"
Oct 03 13:38:28 crc kubenswrapper[4861]: I1003 13:38:28.966396 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/cea5a4f9-7f99-4691-88aa-220aa55c5d9e-registry-certificates\") pod \"image-registry-66df7c8f76-lbhzn\" (UID: \"cea5a4f9-7f99-4691-88aa-220aa55c5d9e\") " pod="openshift-image-registry/image-registry-66df7c8f76-lbhzn"
Oct 03 13:38:28 crc kubenswrapper[4861]: I1003 13:38:28.966406 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/cea5a4f9-7f99-4691-88aa-220aa55c5d9e-trusted-ca\") pod \"image-registry-66df7c8f76-lbhzn\" (UID: \"cea5a4f9-7f99-4691-88aa-220aa55c5d9e\") " pod="openshift-image-registry/image-registry-66df7c8f76-lbhzn"
Oct 03 13:38:28 crc kubenswrapper[4861]: I1003 13:38:28.965112 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/cea5a4f9-7f99-4691-88aa-220aa55c5d9e-ca-trust-extracted\") pod \"image-registry-66df7c8f76-lbhzn\" (UID: \"cea5a4f9-7f99-4691-88aa-220aa55c5d9e\") " pod="openshift-image-registry/image-registry-66df7c8f76-lbhzn"
Oct 03 13:38:28 crc kubenswrapper[4861]: I1003 13:38:28.970733 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/cea5a4f9-7f99-4691-88aa-220aa55c5d9e-installation-pull-secrets\") pod \"image-registry-66df7c8f76-lbhzn\" (UID: \"cea5a4f9-7f99-4691-88aa-220aa55c5d9e\") " pod="openshift-image-registry/image-registry-66df7c8f76-lbhzn"
Oct 03 13:38:28 crc kubenswrapper[4861]: I1003 13:38:28.970760 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/cea5a4f9-7f99-4691-88aa-220aa55c5d9e-registry-tls\") pod \"image-registry-66df7c8f76-lbhzn\" (UID: \"cea5a4f9-7f99-4691-88aa-220aa55c5d9e\") " pod="openshift-image-registry/image-registry-66df7c8f76-lbhzn"
Oct 03 13:38:28 crc kubenswrapper[4861]: I1003 13:38:28.984187 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f4pdv\" (UniqueName: \"kubernetes.io/projected/cea5a4f9-7f99-4691-88aa-220aa55c5d9e-kube-api-access-f4pdv\") pod \"image-registry-66df7c8f76-lbhzn\" (UID: \"cea5a4f9-7f99-4691-88aa-220aa55c5d9e\") " pod="openshift-image-registry/image-registry-66df7c8f76-lbhzn"
Oct 03 13:38:28 crc kubenswrapper[4861]: I1003 13:38:28.984456 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/cea5a4f9-7f99-4691-88aa-220aa55c5d9e-bound-sa-token\") pod \"image-registry-66df7c8f76-lbhzn\" (UID: \"cea5a4f9-7f99-4691-88aa-220aa55c5d9e\") " pod="openshift-image-registry/image-registry-66df7c8f76-lbhzn"
Oct 03 13:38:29 crc kubenswrapper[4861]: I1003 13:38:29.034254 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-lbhzn"
Oct 03 13:38:29 crc kubenswrapper[4861]: I1003 13:38:29.455055 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-lbhzn"]
Oct 03 13:38:29 crc kubenswrapper[4861]: I1003 13:38:29.610350 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-lbhzn" event={"ID":"cea5a4f9-7f99-4691-88aa-220aa55c5d9e","Type":"ContainerStarted","Data":"d743050e9fdd18fc5b54de5af5f707521e13e8e01b5f906e5271acba1179c566"}
Oct 03 13:38:29 crc kubenswrapper[4861]: I1003 13:38:29.610393 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-lbhzn" event={"ID":"cea5a4f9-7f99-4691-88aa-220aa55c5d9e","Type":"ContainerStarted","Data":"48f29aa79813601dd82289ff8b32af6a9bfa2bc20919f6dfd29888a87f673c77"}
Oct 03 13:38:29 crc kubenswrapper[4861]: I1003 13:38:29.610934 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-lbhzn"
Oct 03 13:38:29 crc kubenswrapper[4861]: I1003 13:38:29.634555 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-lbhzn" podStartSLOduration=1.634498577 podStartE2EDuration="1.634498577s" podCreationTimestamp="2025-10-03 13:38:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:38:29.630179547 +0000 UTC m=+423.628164594" watchObservedRunningTime="2025-10-03 13:38:29.634498577 +0000 UTC m=+423.632483644"
Oct 03 13:38:49 crc kubenswrapper[4861]: I1003 13:38:49.042681 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-lbhzn"
Oct 03 13:38:49 crc kubenswrapper[4861]: I1003 13:38:49.114719 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-t6vlm"]
Oct 03 13:39:14 crc kubenswrapper[4861]: I1003 13:39:14.161668 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-t6vlm" podUID="2ddecabf-87aa-4eda-93a5-eee5c61d3b91" containerName="registry" containerID="cri-o://789f3d6042afd6b0852c4c1b40373543030c212fe1dc4c028c60578f294c16b8" gracePeriod=30
Oct 03 13:39:14 crc kubenswrapper[4861]: I1003 13:39:14.510776 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-t6vlm"
Oct 03 13:39:14 crc kubenswrapper[4861]: I1003 13:39:14.659735 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/2ddecabf-87aa-4eda-93a5-eee5c61d3b91-bound-sa-token\") pod \"2ddecabf-87aa-4eda-93a5-eee5c61d3b91\" (UID: \"2ddecabf-87aa-4eda-93a5-eee5c61d3b91\") "
Oct 03 13:39:14 crc kubenswrapper[4861]: I1003 13:39:14.659775 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/2ddecabf-87aa-4eda-93a5-eee5c61d3b91-registry-tls\") pod \"2ddecabf-87aa-4eda-93a5-eee5c61d3b91\" (UID: \"2ddecabf-87aa-4eda-93a5-eee5c61d3b91\") "
Oct 03 13:39:14 crc kubenswrapper[4861]: I1003 13:39:14.659800 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/2ddecabf-87aa-4eda-93a5-eee5c61d3b91-ca-trust-extracted\") pod \"2ddecabf-87aa-4eda-93a5-eee5c61d3b91\" (UID: \"2ddecabf-87aa-4eda-93a5-eee5c61d3b91\") "
Oct 03 13:39:14 crc kubenswrapper[4861]: I1003 13:39:14.659845 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2ddecabf-87aa-4eda-93a5-eee5c61d3b91-trusted-ca\") pod \"2ddecabf-87aa-4eda-93a5-eee5c61d3b91\" (UID: \"2ddecabf-87aa-4eda-93a5-eee5c61d3b91\") "
Oct 03 13:39:14 crc kubenswrapper[4861]: I1003 13:39:14.659893 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hjn98\" (UniqueName: \"kubernetes.io/projected/2ddecabf-87aa-4eda-93a5-eee5c61d3b91-kube-api-access-hjn98\") pod \"2ddecabf-87aa-4eda-93a5-eee5c61d3b91\" (UID: \"2ddecabf-87aa-4eda-93a5-eee5c61d3b91\") "
Oct 03 13:39:14 crc kubenswrapper[4861]: I1003 13:39:14.660692 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2ddecabf-87aa-4eda-93a5-eee5c61d3b91-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "2ddecabf-87aa-4eda-93a5-eee5c61d3b91" (UID: "2ddecabf-87aa-4eda-93a5-eee5c61d3b91"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 13:39:14 crc kubenswrapper[4861]: I1003 13:39:14.660762 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/2ddecabf-87aa-4eda-93a5-eee5c61d3b91-registry-certificates\") pod \"2ddecabf-87aa-4eda-93a5-eee5c61d3b91\" (UID: \"2ddecabf-87aa-4eda-93a5-eee5c61d3b91\") "
Oct 03 13:39:14 crc kubenswrapper[4861]: I1003 13:39:14.660835 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/2ddecabf-87aa-4eda-93a5-eee5c61d3b91-installation-pull-secrets\") pod \"2ddecabf-87aa-4eda-93a5-eee5c61d3b91\" (UID: \"2ddecabf-87aa-4eda-93a5-eee5c61d3b91\") "
Oct 03 13:39:14 crc kubenswrapper[4861]: I1003 13:39:14.660756 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2ddecabf-87aa-4eda-93a5-eee5c61d3b91-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "2ddecabf-87aa-4eda-93a5-eee5c61d3b91" (UID: "2ddecabf-87aa-4eda-93a5-eee5c61d3b91"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 13:39:14 crc kubenswrapper[4861]: I1003 13:39:14.661015 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"2ddecabf-87aa-4eda-93a5-eee5c61d3b91\" (UID: \"2ddecabf-87aa-4eda-93a5-eee5c61d3b91\") "
Oct 03 13:39:14 crc kubenswrapper[4861]: I1003 13:39:14.661200 4861 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2ddecabf-87aa-4eda-93a5-eee5c61d3b91-trusted-ca\") on node \"crc\" DevicePath \"\""
Oct 03 13:39:14 crc kubenswrapper[4861]: I1003 13:39:14.661217 4861 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/2ddecabf-87aa-4eda-93a5-eee5c61d3b91-registry-certificates\") on node \"crc\" DevicePath \"\""
Oct 03 13:39:14 crc kubenswrapper[4861]: I1003 13:39:14.666952 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2ddecabf-87aa-4eda-93a5-eee5c61d3b91-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "2ddecabf-87aa-4eda-93a5-eee5c61d3b91" (UID: "2ddecabf-87aa-4eda-93a5-eee5c61d3b91"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 13:39:14 crc kubenswrapper[4861]: I1003 13:39:14.668427 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2ddecabf-87aa-4eda-93a5-eee5c61d3b91-kube-api-access-hjn98" (OuterVolumeSpecName: "kube-api-access-hjn98") pod "2ddecabf-87aa-4eda-93a5-eee5c61d3b91" (UID: "2ddecabf-87aa-4eda-93a5-eee5c61d3b91"). InnerVolumeSpecName "kube-api-access-hjn98". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 13:39:14 crc kubenswrapper[4861]: I1003 13:39:14.670512 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2ddecabf-87aa-4eda-93a5-eee5c61d3b91-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "2ddecabf-87aa-4eda-93a5-eee5c61d3b91" (UID: "2ddecabf-87aa-4eda-93a5-eee5c61d3b91"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 13:39:14 crc kubenswrapper[4861]: I1003 13:39:14.670742 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2ddecabf-87aa-4eda-93a5-eee5c61d3b91-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "2ddecabf-87aa-4eda-93a5-eee5c61d3b91" (UID: "2ddecabf-87aa-4eda-93a5-eee5c61d3b91"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 13:39:14 crc kubenswrapper[4861]: I1003 13:39:14.675429 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "2ddecabf-87aa-4eda-93a5-eee5c61d3b91" (UID: "2ddecabf-87aa-4eda-93a5-eee5c61d3b91"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue ""
Oct 03 13:39:14 crc kubenswrapper[4861]: I1003 13:39:14.676316 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2ddecabf-87aa-4eda-93a5-eee5c61d3b91-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "2ddecabf-87aa-4eda-93a5-eee5c61d3b91" (UID: "2ddecabf-87aa-4eda-93a5-eee5c61d3b91"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 03 13:39:14 crc kubenswrapper[4861]: I1003 13:39:14.761715 4861 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/2ddecabf-87aa-4eda-93a5-eee5c61d3b91-installation-pull-secrets\") on node \"crc\" DevicePath \"\""
Oct 03 13:39:14 crc kubenswrapper[4861]: I1003 13:39:14.761747 4861 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/2ddecabf-87aa-4eda-93a5-eee5c61d3b91-bound-sa-token\") on node \"crc\" DevicePath \"\""
Oct 03 13:39:14 crc kubenswrapper[4861]: I1003 13:39:14.761756 4861 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/2ddecabf-87aa-4eda-93a5-eee5c61d3b91-registry-tls\") on node \"crc\" DevicePath \"\""
Oct 03 13:39:14 crc kubenswrapper[4861]: I1003 13:39:14.761776 4861 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/2ddecabf-87aa-4eda-93a5-eee5c61d3b91-ca-trust-extracted\") on node \"crc\" DevicePath \"\""
Oct 03 13:39:14 crc kubenswrapper[4861]: I1003 13:39:14.761786 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hjn98\" (UniqueName: \"kubernetes.io/projected/2ddecabf-87aa-4eda-93a5-eee5c61d3b91-kube-api-access-hjn98\") on node \"crc\" DevicePath \"\""
Oct 03 13:39:14 crc kubenswrapper[4861]: I1003 13:39:14.885016 4861 generic.go:334] "Generic (PLEG): container finished" podID="2ddecabf-87aa-4eda-93a5-eee5c61d3b91" containerID="789f3d6042afd6b0852c4c1b40373543030c212fe1dc4c028c60578f294c16b8" exitCode=0
Oct 03 13:39:14 crc kubenswrapper[4861]: I1003 13:39:14.885073 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-t6vlm" event={"ID":"2ddecabf-87aa-4eda-93a5-eee5c61d3b91","Type":"ContainerDied","Data":"789f3d6042afd6b0852c4c1b40373543030c212fe1dc4c028c60578f294c16b8"}
Oct 03 13:39:14 crc kubenswrapper[4861]: I1003 13:39:14.885289 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-t6vlm" event={"ID":"2ddecabf-87aa-4eda-93a5-eee5c61d3b91","Type":"ContainerDied","Data":"3796e43f8075237131e19dd0668c9c8479c97bc6b3bb5104757e6ce6a495d5de"}
Oct 03 13:39:14 crc kubenswrapper[4861]: I1003 13:39:14.885098 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-t6vlm"
Oct 03 13:39:14 crc kubenswrapper[4861]: I1003 13:39:14.885424 4861 scope.go:117] "RemoveContainer" containerID="789f3d6042afd6b0852c4c1b40373543030c212fe1dc4c028c60578f294c16b8"
Oct 03 13:39:14 crc kubenswrapper[4861]: I1003 13:39:14.903579 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-t6vlm"]
Oct 03 13:39:14 crc kubenswrapper[4861]: I1003 13:39:14.911554 4861 scope.go:117] "RemoveContainer" containerID="789f3d6042afd6b0852c4c1b40373543030c212fe1dc4c028c60578f294c16b8"
Oct 03 13:39:14 crc kubenswrapper[4861]: E1003 13:39:14.912034 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"789f3d6042afd6b0852c4c1b40373543030c212fe1dc4c028c60578f294c16b8\": container with ID starting with 789f3d6042afd6b0852c4c1b40373543030c212fe1dc4c028c60578f294c16b8 not found: ID does not exist" containerID="789f3d6042afd6b0852c4c1b40373543030c212fe1dc4c028c60578f294c16b8"
Oct 03 13:39:14 crc kubenswrapper[4861]: I1003 13:39:14.912063 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"789f3d6042afd6b0852c4c1b40373543030c212fe1dc4c028c60578f294c16b8"} err="failed to get container status \"789f3d6042afd6b0852c4c1b40373543030c212fe1dc4c028c60578f294c16b8\": rpc error: code = NotFound desc = could not find container \"789f3d6042afd6b0852c4c1b40373543030c212fe1dc4c028c60578f294c16b8\": container with ID starting with 789f3d6042afd6b0852c4c1b40373543030c212fe1dc4c028c60578f294c16b8 not found: ID does not exist"
Oct 03 13:39:14 crc kubenswrapper[4861]: I1003 13:39:14.912668 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-t6vlm"]
Oct 03 13:39:16 crc kubenswrapper[4861]: I1003 13:39:16.687591 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2ddecabf-87aa-4eda-93a5-eee5c61d3b91" path="/var/lib/kubelet/pods/2ddecabf-87aa-4eda-93a5-eee5c61d3b91/volumes"
Oct 03 13:40:00 crc kubenswrapper[4861]: I1003 13:40:00.145004 4861 patch_prober.go:28] interesting pod/machine-config-daemon-t9slw container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 03 13:40:00 crc kubenswrapper[4861]: I1003 13:40:00.145564 4861 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 03 13:40:30 crc kubenswrapper[4861]: I1003 13:40:30.144867 4861 patch_prober.go:28] interesting pod/machine-config-daemon-t9slw container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 03 13:40:30 crc kubenswrapper[4861]: I1003 13:40:30.145400 4861 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 03 13:41:00 crc kubenswrapper[4861]: I1003 13:41:00.145622 4861 patch_prober.go:28] interesting pod/machine-config-daemon-t9slw container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 03 13:41:00 crc kubenswrapper[4861]: I1003 13:41:00.146188 4861 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 03 13:41:00 crc kubenswrapper[4861]: I1003 13:41:00.146262 4861 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-t9slw"
Oct 03 13:41:00 crc kubenswrapper[4861]: I1003 13:41:00.146891 4861 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"e480ef4df69d33dab6234057237660f52eab1528687ed0bfdbfbb9854b248c93"} pod="openshift-machine-config-operator/machine-config-daemon-t9slw" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Oct 03 13:41:00 crc kubenswrapper[4861]: I1003 13:41:00.146965 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" containerName="machine-config-daemon" containerID="cri-o://e480ef4df69d33dab6234057237660f52eab1528687ed0bfdbfbb9854b248c93" gracePeriod=600
Oct 03 13:41:00 crc kubenswrapper[4861]: I1003 13:41:00.412327 4861 generic.go:334] "Generic (PLEG): container finished" podID="d8335d3f-417e-4114-b306-a3d8f6c31348" containerID="e480ef4df69d33dab6234057237660f52eab1528687ed0bfdbfbb9854b248c93" exitCode=0
Oct 03 13:41:00 crc kubenswrapper[4861]: I1003 13:41:00.412393 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" event={"ID":"d8335d3f-417e-4114-b306-a3d8f6c31348","Type":"ContainerDied","Data":"e480ef4df69d33dab6234057237660f52eab1528687ed0bfdbfbb9854b248c93"}
Oct 03 13:41:00 crc kubenswrapper[4861]: I1003 13:41:00.412679 4861 scope.go:117] "RemoveContainer" containerID="58a55ca6ca92c0434beafec071a82b4a4f4ceddc00d7c2e4599653cb06f6f65e"
Oct 03 13:41:01 crc kubenswrapper[4861]: I1003 13:41:01.421212 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" event={"ID":"d8335d3f-417e-4114-b306-a3d8f6c31348","Type":"ContainerStarted","Data":"d80b8d3116622b7c143b4b78ef1cc92a62fa6958e50f67859a9a0dce0c925ac5"}
Oct 03 13:42:41 crc kubenswrapper[4861]: I1003 13:42:41.631568 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-fkrcs"]
Oct 03 13:42:41 crc kubenswrapper[4861]: E1003 13:42:41.632410 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2ddecabf-87aa-4eda-93a5-eee5c61d3b91" containerName="registry"
Oct 03 13:42:41 crc kubenswrapper[4861]: I1003 13:42:41.632428 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="2ddecabf-87aa-4eda-93a5-eee5c61d3b91" containerName="registry"
Oct 03 13:42:41 crc kubenswrapper[4861]: I1003 13:42:41.632553 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="2ddecabf-87aa-4eda-93a5-eee5c61d3b91" containerName="registry"
Oct 03 13:42:41 crc kubenswrapper[4861]: I1003 13:42:41.632998 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-fkrcs"
Oct 03 13:42:41 crc kubenswrapper[4861]: I1003 13:42:41.635826 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt"
Oct 03 13:42:41 crc kubenswrapper[4861]: I1003 13:42:41.636102 4861 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-982cm"
Oct 03 13:42:41 crc kubenswrapper[4861]: I1003 13:42:41.636261 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt"
Oct 03 13:42:41 crc kubenswrapper[4861]: I1003 13:42:41.645714 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-5b446d88c5-mpwfn"]
Oct 03 13:42:41 crc kubenswrapper[4861]: I1003 13:42:41.646303 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-mpwfn"
Oct 03 13:42:41 crc kubenswrapper[4861]: I1003 13:42:41.655258 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-fkrcs"]
Oct 03 13:42:41 crc kubenswrapper[4861]: I1003 13:42:41.655659 4861 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-ws6hh"
Oct 03 13:42:41 crc kubenswrapper[4861]: I1003 13:42:41.665828 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-872cz"]
Oct 03 13:42:41 crc kubenswrapper[4861]: I1003 13:42:41.666687 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-872cz"
Oct 03 13:42:41 crc kubenswrapper[4861]: I1003 13:42:41.669418 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-mpwfn"]
Oct 03 13:42:41 crc kubenswrapper[4861]: I1003 13:42:41.670378 4861 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-h7268"
Oct 03 13:42:41 crc kubenswrapper[4861]: I1003 13:42:41.678424 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-872cz"]
Oct 03 13:42:41 crc kubenswrapper[4861]: I1003 13:42:41.709440 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wp7cj\" (UniqueName: \"kubernetes.io/projected/10cb85f1-4e24-4793-b1d0-8c3b11ceb85c-kube-api-access-wp7cj\") pod \"cert-manager-cainjector-7f985d654d-fkrcs\" (UID: \"10cb85f1-4e24-4793-b1d0-8c3b11ceb85c\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-fkrcs"
Oct 03 13:42:41 crc kubenswrapper[4861]: I1003 13:42:41.709506 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5gmql\" (UniqueName: \"kubernetes.io/projected/16b81380-ffaa-4755-a4d0-86c03eb2094f-kube-api-access-5gmql\") pod \"cert-manager-webhook-5655c58dd6-872cz\" (UID: \"16b81380-ffaa-4755-a4d0-86c03eb2094f\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-872cz"
Oct 03 13:42:41 crc kubenswrapper[4861]: I1003 13:42:41.709585 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dxndr\" (UniqueName: \"kubernetes.io/projected/f5edb507-a930-4aac-b964-575b85ab8676-kube-api-access-dxndr\") pod \"cert-manager-5b446d88c5-mpwfn\" (UID: \"f5edb507-a930-4aac-b964-575b85ab8676\") " pod="cert-manager/cert-manager-5b446d88c5-mpwfn"
Oct 03 13:42:41 crc kubenswrapper[4861]: I1003 13:42:41.810360 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5gmql\" (UniqueName: \"kubernetes.io/projected/16b81380-ffaa-4755-a4d0-86c03eb2094f-kube-api-access-5gmql\") pod \"cert-manager-webhook-5655c58dd6-872cz\" (UID: \"16b81380-ffaa-4755-a4d0-86c03eb2094f\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-872cz"
Oct 03 13:42:41 crc kubenswrapper[4861]: I1003 13:42:41.810496 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dxndr\" (UniqueName: \"kubernetes.io/projected/f5edb507-a930-4aac-b964-575b85ab8676-kube-api-access-dxndr\") pod \"cert-manager-5b446d88c5-mpwfn\" (UID: \"f5edb507-a930-4aac-b964-575b85ab8676\") " pod="cert-manager/cert-manager-5b446d88c5-mpwfn"
Oct 03 13:42:41 crc kubenswrapper[4861]: I1003 13:42:41.811321 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wp7cj\" (UniqueName: \"kubernetes.io/projected/10cb85f1-4e24-4793-b1d0-8c3b11ceb85c-kube-api-access-wp7cj\") pod \"cert-manager-cainjector-7f985d654d-fkrcs\" (UID: \"10cb85f1-4e24-4793-b1d0-8c3b11ceb85c\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-fkrcs"
Oct 03 13:42:41 crc kubenswrapper[4861]: I1003 13:42:41.834248 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5gmql\" (UniqueName: \"kubernetes.io/projected/16b81380-ffaa-4755-a4d0-86c03eb2094f-kube-api-access-5gmql\") pod \"cert-manager-webhook-5655c58dd6-872cz\" (UID: \"16b81380-ffaa-4755-a4d0-86c03eb2094f\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-872cz"
Oct 03 13:42:41 crc kubenswrapper[4861]: I1003 13:42:41.837860 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wp7cj\" (UniqueName: \"kubernetes.io/projected/10cb85f1-4e24-4793-b1d0-8c3b11ceb85c-kube-api-access-wp7cj\") pod \"cert-manager-cainjector-7f985d654d-fkrcs\" (UID: \"10cb85f1-4e24-4793-b1d0-8c3b11ceb85c\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-fkrcs"
Oct 03 13:42:41 crc kubenswrapper[4861]: I1003 13:42:41.840505 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dxndr\" (UniqueName: \"kubernetes.io/projected/f5edb507-a930-4aac-b964-575b85ab8676-kube-api-access-dxndr\") pod \"cert-manager-5b446d88c5-mpwfn\" (UID: \"f5edb507-a930-4aac-b964-575b85ab8676\") " pod="cert-manager/cert-manager-5b446d88c5-mpwfn"
Oct 03 13:42:41 crc kubenswrapper[4861]: I1003 13:42:41.956940 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-fkrcs"
Oct 03 13:42:41 crc kubenswrapper[4861]: I1003 13:42:41.966243 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-mpwfn"
Oct 03 13:42:41 crc kubenswrapper[4861]: I1003 13:42:41.982135 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-872cz"
Oct 03 13:42:42 crc kubenswrapper[4861]: I1003 13:42:42.239013 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-fkrcs"]
Oct 03 13:42:42 crc kubenswrapper[4861]: I1003 13:42:42.250991 4861 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Oct 03 13:42:42 crc kubenswrapper[4861]: I1003 13:42:42.266980 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-mpwfn"]
Oct 03 13:42:42 crc kubenswrapper[4861]: W1003 13:42:42.270908 4861 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf5edb507_a930_4aac_b964_575b85ab8676.slice/crio-5195b3b9caba5690fd1dc56c182fef6c5c67b608cdc353de1d984ceece5063fa WatchSource:0}: Error finding container 5195b3b9caba5690fd1dc56c182fef6c5c67b608cdc353de1d984ceece5063fa: Status 404 returned error can't find the container with id 5195b3b9caba5690fd1dc56c182fef6c5c67b608cdc353de1d984ceece5063fa
Oct 03 13:42:42 crc kubenswrapper[4861]: I1003 13:42:42.324744 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-872cz"]
Oct 03 13:42:42 crc kubenswrapper[4861]: I1003 13:42:42.956459 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-fkrcs" event={"ID":"10cb85f1-4e24-4793-b1d0-8c3b11ceb85c","Type":"ContainerStarted","Data":"6bef4b7f529a2bef4056eb391212b90d2f30fa57dd72074147fbe45384621127"}
Oct 03 13:42:42 crc kubenswrapper[4861]: I1003 13:42:42.957460 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-mpwfn" event={"ID":"f5edb507-a930-4aac-b964-575b85ab8676","Type":"ContainerStarted","Data":"5195b3b9caba5690fd1dc56c182fef6c5c67b608cdc353de1d984ceece5063fa"}
Oct 03 13:42:42 crc kubenswrapper[4861]: I1003 13:42:42.958208 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-872cz" event={"ID":"16b81380-ffaa-4755-a4d0-86c03eb2094f","Type":"ContainerStarted","Data":"fe33bbf0b6f2327a6b3fcb8f978da897615c7b5349127f812ec865b8b7f0b4c6"}
Oct 03 13:42:45 crc kubenswrapper[4861]: I1003 13:42:45.978445 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-872cz" event={"ID":"16b81380-ffaa-4755-a4d0-86c03eb2094f","Type":"ContainerStarted","Data":"9127c70390a79b24053a9a5c5be4cf1d23fdd690e432e5b8796deee33242d736"}
Oct 03 13:42:45 crc kubenswrapper[4861]: I1003 13:42:45.978895 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-5655c58dd6-872cz"
Oct 03 13:42:46 crc kubenswrapper[4861]: I1003 13:42:46.000500 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-5655c58dd6-872cz" podStartSLOduration=1.638434117 podStartE2EDuration="5.000473515s" podCreationTimestamp="2025-10-03 13:42:41 +0000 UTC" firstStartedPulling="2025-10-03 13:42:42.331635755 +0000 UTC m=+676.329620802" lastFinishedPulling="2025-10-03 13:42:45.693675153 +0000 UTC m=+679.691660200" observedRunningTime="2025-10-03 13:42:45.992808804 +0000 UTC m=+679.990793881" watchObservedRunningTime="2025-10-03 13:42:46.000473515 +0000 UTC m=+679.998458562"
Oct 03 13:42:46 crc kubenswrapper[4861]: I1003 13:42:46.984785 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-fkrcs" event={"ID":"10cb85f1-4e24-4793-b1d0-8c3b11ceb85c","Type":"ContainerStarted","Data":"b7fc01a20ae1704bc0dbc8fa0ea580e9a239b3164cce9b7522eb4853b15e3085"}
Oct 03 13:42:48 crc kubenswrapper[4861]: I1003 13:42:48.994206 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-mpwfn" event={"ID":"f5edb507-a930-4aac-b964-575b85ab8676","Type":"ContainerStarted","Data":"a26fde921346d92cfc05bbae0b83ba50f57926d365ef0d270f350758224f8bc9"}
Oct 03 13:42:49 crc kubenswrapper[4861]: I1003 13:42:49.014542 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-5b446d88c5-mpwfn" podStartSLOduration=2.163835035 podStartE2EDuration="8.014521179s" podCreationTimestamp="2025-10-03 13:42:41 +0000 UTC" firstStartedPulling="2025-10-03 13:42:42.272888783 +0000 UTC m=+676.270873830" lastFinishedPulling="2025-10-03 13:42:48.123574917 +0000 UTC m=+682.121559974" observedRunningTime="2025-10-03 13:42:49.013887553 +0000 UTC m=+683.011872600" watchObservedRunningTime="2025-10-03 13:42:49.014521179 +0000 UTC m=+683.012506236"
Oct 03 13:42:49 crc kubenswrapper[4861]: I1003 13:42:49.014874 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-7f985d654d-fkrcs" podStartSLOduration=3.702898749 podStartE2EDuration="8.014866808s" podCreationTimestamp="2025-10-03 13:42:41 +0000 UTC" firstStartedPulling="2025-10-03 13:42:42.25066144 +0000 UTC m=+676.248646487" lastFinishedPulling="2025-10-03 13:42:46.562629499 +0000 UTC m=+680.560614546" observedRunningTime="2025-10-03 13:42:47.006653352 +0000 UTC m=+681.004638399" watchObservedRunningTime="2025-10-03 13:42:49.014866808 +0000 UTC m=+683.012851855"
Oct 03 13:42:51 crc kubenswrapper[4861]: I1003 13:42:51.984953 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-5655c58dd6-872cz"
Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.052750 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-5twn4"]
Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.053605 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" podUID="66ed4999-426b-4615-bfb3-764a3ecc950f" containerName="ovn-controller" containerID="cri-o://a49ed8048fd561e10cc87dfa9b39d3ff2123f2cc65f9b4402bba6bf01d161213" gracePeriod=30
Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.053710 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" podUID="66ed4999-426b-4615-bfb3-764a3ecc950f" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://1d7403a686a403bd13b8c7040a8d54e47ea882e532dbde51ff960cf2b4a7dc84" gracePeriod=30
Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.053788 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" podUID="66ed4999-426b-4615-bfb3-764a3ecc950f" containerName="northd" containerID="cri-o://5c11d2168c2a8a146f93a9048c50a0a7da936f36039a924e2e0c946f571ac6d2" gracePeriod=30
Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.053710 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" podUID="66ed4999-426b-4615-bfb3-764a3ecc950f" containerName="nbdb" containerID="cri-o://3dadd381cc9fb8f216611723c7f3113272fdd37e424ab087ae2b516b1282c724" gracePeriod=30
Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.053773 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" podUID="66ed4999-426b-4615-bfb3-764a3ecc950f" containerName="sbdb" containerID="cri-o://f2ee45e3f91ddde95e7bdf26aed6afb1d69eb3dbbdad136c66a51a2a3a325984" gracePeriod=30
Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.054005 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" podUID="66ed4999-426b-4615-bfb3-764a3ecc950f" containerName="ovn-acl-logging" containerID="cri-o://17d9e0e142062e93034c5f825e1229664112d38443d5843713cac6e077737c48" gracePeriod=30
Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.054158 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" podUID="66ed4999-426b-4615-bfb3-764a3ecc950f" containerName="kube-rbac-proxy-node" containerID="cri-o://a6dd78f8d0e99d19d90df2672bd0a66e48195ab147e3821b110c5b9b13fff935" gracePeriod=30
Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.115760 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" podUID="66ed4999-426b-4615-bfb3-764a3ecc950f" containerName="ovnkube-controller" containerID="cri-o://c4d5354382b50370a550e49f6c3deafd2a393871bb8b60c462fecdc244249ea0" gracePeriod=30
Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.410944 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5twn4_66ed4999-426b-4615-bfb3-764a3ecc950f/ovnkube-controller/2.log"
Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.412734 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5twn4_66ed4999-426b-4615-bfb3-764a3ecc950f/ovn-acl-logging/0.log"
Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.413155 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5twn4_66ed4999-426b-4615-bfb3-764a3ecc950f/ovn-controller/0.log"
Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.413580 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-5twn4"
Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.450152 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/66ed4999-426b-4615-bfb3-764a3ecc950f-run-systemd\") pod \"66ed4999-426b-4615-bfb3-764a3ecc950f\" (UID: \"66ed4999-426b-4615-bfb3-764a3ecc950f\") "
Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.450220 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nwnjl\" (UniqueName: \"kubernetes.io/projected/66ed4999-426b-4615-bfb3-764a3ecc950f-kube-api-access-nwnjl\") pod \"66ed4999-426b-4615-bfb3-764a3ecc950f\" (UID: \"66ed4999-426b-4615-bfb3-764a3ecc950f\") "
Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.450267 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/66ed4999-426b-4615-bfb3-764a3ecc950f-host-run-ovn-kubernetes\") pod \"66ed4999-426b-4615-bfb3-764a3ecc950f\" (UID: \"66ed4999-426b-4615-bfb3-764a3ecc950f\") "
Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.450297 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/66ed4999-426b-4615-bfb3-764a3ecc950f-run-openvswitch\") pod \"66ed4999-426b-4615-bfb3-764a3ecc950f\" (UID: \"66ed4999-426b-4615-bfb3-764a3ecc950f\") "
Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.450338 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/66ed4999-426b-4615-bfb3-764a3ecc950f-host-slash\") pod \"66ed4999-426b-4615-bfb3-764a3ecc950f\" (UID: \"66ed4999-426b-4615-bfb3-764a3ecc950f\") "
Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.450366 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/66ed4999-426b-4615-bfb3-764a3ecc950f-ovnkube-script-lib\") pod \"66ed4999-426b-4615-bfb3-764a3ecc950f\" (UID: \"66ed4999-426b-4615-bfb3-764a3ecc950f\") "
Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.450388 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/66ed4999-426b-4615-bfb3-764a3ecc950f-env-overrides\") pod \"66ed4999-426b-4615-bfb3-764a3ecc950f\" (UID: \"66ed4999-426b-4615-bfb3-764a3ecc950f\") "
Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.450429 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/66ed4999-426b-4615-bfb3-764a3ecc950f-run-ovn\") pod \"66ed4999-426b-4615-bfb3-764a3ecc950f\" (UID: \"66ed4999-426b-4615-bfb3-764a3ecc950f\") "
Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.450459 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/66ed4999-426b-4615-bfb3-764a3ecc950f-host-cni-netd\") pod \"66ed4999-426b-4615-bfb3-764a3ecc950f\" (UID: \"66ed4999-426b-4615-bfb3-764a3ecc950f\") "
Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.450482 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/66ed4999-426b-4615-bfb3-764a3ecc950f-systemd-units\") pod \"66ed4999-426b-4615-bfb3-764a3ecc950f\" (UID: \"66ed4999-426b-4615-bfb3-764a3ecc950f\") "
Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.450509 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/66ed4999-426b-4615-bfb3-764a3ecc950f-log-socket\") pod \"66ed4999-426b-4615-bfb3-764a3ecc950f\" (UID: \"66ed4999-426b-4615-bfb3-764a3ecc950f\") "
Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.450530 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/66ed4999-426b-4615-bfb3-764a3ecc950f-host-run-netns\") pod \"66ed4999-426b-4615-bfb3-764a3ecc950f\" (UID: \"66ed4999-426b-4615-bfb3-764a3ecc950f\") "
Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.450551 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/66ed4999-426b-4615-bfb3-764a3ecc950f-node-log\") pod \"66ed4999-426b-4615-bfb3-764a3ecc950f\" (UID: \"66ed4999-426b-4615-bfb3-764a3ecc950f\") "
Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.450579 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/66ed4999-426b-4615-bfb3-764a3ecc950f-host-cni-bin\") pod \"66ed4999-426b-4615-bfb3-764a3ecc950f\" (UID: \"66ed4999-426b-4615-bfb3-764a3ecc950f\") "
Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.450617 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/66ed4999-426b-4615-bfb3-764a3ecc950f-ovn-node-metrics-cert\") pod \"66ed4999-426b-4615-bfb3-764a3ecc950f\" (UID: \"66ed4999-426b-4615-bfb3-764a3ecc950f\") "
Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.450644 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/66ed4999-426b-4615-bfb3-764a3ecc950f-var-lib-openvswitch\") pod \"66ed4999-426b-4615-bfb3-764a3ecc950f\" (UID: \"66ed4999-426b-4615-bfb3-764a3ecc950f\") "
Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.450666 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/66ed4999-426b-4615-bfb3-764a3ecc950f-host-var-lib-cni-networks-ovn-kubernetes\") pod \"66ed4999-426b-4615-bfb3-764a3ecc950f\" (UID: \"66ed4999-426b-4615-bfb3-764a3ecc950f\") "
Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.450690 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/66ed4999-426b-4615-bfb3-764a3ecc950f-ovnkube-config\") pod \"66ed4999-426b-4615-bfb3-764a3ecc950f\" (UID: \"66ed4999-426b-4615-bfb3-764a3ecc950f\") "
Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.450713 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/66ed4999-426b-4615-bfb3-764a3ecc950f-host-kubelet\") pod \"66ed4999-426b-4615-bfb3-764a3ecc950f\" (UID: \"66ed4999-426b-4615-bfb3-764a3ecc950f\") "
Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.450732 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/66ed4999-426b-4615-bfb3-764a3ecc950f-etc-openvswitch\") pod \"66ed4999-426b-4615-bfb3-764a3ecc950f\" (UID: \"66ed4999-426b-4615-bfb3-764a3ecc950f\") "
Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.450994 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/66ed4999-426b-4615-bfb3-764a3ecc950f-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "66ed4999-426b-4615-bfb3-764a3ecc950f" (UID: "66ed4999-426b-4615-bfb3-764a3ecc950f"). InnerVolumeSpecName "etc-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.451963 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/66ed4999-426b-4615-bfb3-764a3ecc950f-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "66ed4999-426b-4615-bfb3-764a3ecc950f" (UID: "66ed4999-426b-4615-bfb3-764a3ecc950f"). InnerVolumeSpecName "systemd-units". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.452428 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/66ed4999-426b-4615-bfb3-764a3ecc950f-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "66ed4999-426b-4615-bfb3-764a3ecc950f" (UID: "66ed4999-426b-4615-bfb3-764a3ecc950f"). InnerVolumeSpecName "run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.452621 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/66ed4999-426b-4615-bfb3-764a3ecc950f-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "66ed4999-426b-4615-bfb3-764a3ecc950f" (UID: "66ed4999-426b-4615-bfb3-764a3ecc950f"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.452649 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/66ed4999-426b-4615-bfb3-764a3ecc950f-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "66ed4999-426b-4615-bfb3-764a3ecc950f" (UID: "66ed4999-426b-4615-bfb3-764a3ecc950f"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.452668 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/66ed4999-426b-4615-bfb3-764a3ecc950f-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "66ed4999-426b-4615-bfb3-764a3ecc950f" (UID: "66ed4999-426b-4615-bfb3-764a3ecc950f"). InnerVolumeSpecName "run-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.452685 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/66ed4999-426b-4615-bfb3-764a3ecc950f-host-slash" (OuterVolumeSpecName: "host-slash") pod "66ed4999-426b-4615-bfb3-764a3ecc950f" (UID: "66ed4999-426b-4615-bfb3-764a3ecc950f"). InnerVolumeSpecName "host-slash". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.453131 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/66ed4999-426b-4615-bfb3-764a3ecc950f-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "66ed4999-426b-4615-bfb3-764a3ecc950f" (UID: "66ed4999-426b-4615-bfb3-764a3ecc950f"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.453198 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/66ed4999-426b-4615-bfb3-764a3ecc950f-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "66ed4999-426b-4615-bfb3-764a3ecc950f" (UID: "66ed4999-426b-4615-bfb3-764a3ecc950f"). InnerVolumeSpecName "host-run-netns". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.453248 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/66ed4999-426b-4615-bfb3-764a3ecc950f-log-socket" (OuterVolumeSpecName: "log-socket") pod "66ed4999-426b-4615-bfb3-764a3ecc950f" (UID: "66ed4999-426b-4615-bfb3-764a3ecc950f"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.453286 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/66ed4999-426b-4615-bfb3-764a3ecc950f-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "66ed4999-426b-4615-bfb3-764a3ecc950f" (UID: "66ed4999-426b-4615-bfb3-764a3ecc950f"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.453311 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/66ed4999-426b-4615-bfb3-764a3ecc950f-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "66ed4999-426b-4615-bfb3-764a3ecc950f" (UID: "66ed4999-426b-4615-bfb3-764a3ecc950f"). InnerVolumeSpecName "var-lib-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.453340 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/66ed4999-426b-4615-bfb3-764a3ecc950f-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "66ed4999-426b-4615-bfb3-764a3ecc950f" (UID: "66ed4999-426b-4615-bfb3-764a3ecc950f"). InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.453368 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/66ed4999-426b-4615-bfb3-764a3ecc950f-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "66ed4999-426b-4615-bfb3-764a3ecc950f" (UID: "66ed4999-426b-4615-bfb3-764a3ecc950f"). InnerVolumeSpecName "host-cni-netd". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.453392 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/66ed4999-426b-4615-bfb3-764a3ecc950f-node-log" (OuterVolumeSpecName: "node-log") pod "66ed4999-426b-4615-bfb3-764a3ecc950f" (UID: "66ed4999-426b-4615-bfb3-764a3ecc950f").
InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.453415 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/66ed4999-426b-4615-bfb3-764a3ecc950f-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "66ed4999-426b-4615-bfb3-764a3ecc950f" (UID: "66ed4999-426b-4615-bfb3-764a3ecc950f"). InnerVolumeSpecName "host-cni-bin". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.453891 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/66ed4999-426b-4615-bfb3-764a3ecc950f-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "66ed4999-426b-4615-bfb3-764a3ecc950f" (UID: "66ed4999-426b-4615-bfb3-764a3ecc950f"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.458360 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/66ed4999-426b-4615-bfb3-764a3ecc950f-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "66ed4999-426b-4615-bfb3-764a3ecc950f" (UID: "66ed4999-426b-4615-bfb3-764a3ecc950f"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.459013 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/66ed4999-426b-4615-bfb3-764a3ecc950f-kube-api-access-nwnjl" (OuterVolumeSpecName: "kube-api-access-nwnjl") pod "66ed4999-426b-4615-bfb3-764a3ecc950f" (UID: "66ed4999-426b-4615-bfb3-764a3ecc950f"). InnerVolumeSpecName "kube-api-access-nwnjl". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.471361 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/66ed4999-426b-4615-bfb3-764a3ecc950f-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "66ed4999-426b-4615-bfb3-764a3ecc950f" (UID: "66ed4999-426b-4615-bfb3-764a3ecc950f"). InnerVolumeSpecName "run-systemd". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.475512 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-ts4sc"] Oct 03 13:42:52 crc kubenswrapper[4861]: E1003 13:42:52.475866 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66ed4999-426b-4615-bfb3-764a3ecc950f" containerName="kubecfg-setup" Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.475898 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="66ed4999-426b-4615-bfb3-764a3ecc950f" containerName="kubecfg-setup" Oct 03 13:42:52 crc kubenswrapper[4861]: E1003 13:42:52.475912 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66ed4999-426b-4615-bfb3-764a3ecc950f" containerName="ovn-controller" Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.475920 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="66ed4999-426b-4615-bfb3-764a3ecc950f" containerName="ovn-controller" Oct 03 13:42:52 crc kubenswrapper[4861]: E1003 13:42:52.475933 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66ed4999-426b-4615-bfb3-764a3ecc950f" containerName="ovnkube-controller" Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.475941 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="66ed4999-426b-4615-bfb3-764a3ecc950f" containerName="ovnkube-controller" Oct 03 13:42:52 crc kubenswrapper[4861]: E1003 13:42:52.475968 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66ed4999-426b-4615-bfb3-764a3ecc950f" containerName="ovnkube-controller" Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.475974 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="66ed4999-426b-4615-bfb3-764a3ecc950f" containerName="ovnkube-controller" Oct 03 13:42:52 crc kubenswrapper[4861]: E1003 13:42:52.475989 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66ed4999-426b-4615-bfb3-764a3ecc950f" containerName="kube-rbac-proxy-ovn-metrics" Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.475996 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="66ed4999-426b-4615-bfb3-764a3ecc950f" containerName="kube-rbac-proxy-ovn-metrics" Oct 03 13:42:52 crc kubenswrapper[4861]: E1003 13:42:52.476006 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66ed4999-426b-4615-bfb3-764a3ecc950f" containerName="northd" Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.476013 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="66ed4999-426b-4615-bfb3-764a3ecc950f" containerName="northd" Oct 03 13:42:52 crc kubenswrapper[4861]: E1003 13:42:52.476024 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66ed4999-426b-4615-bfb3-764a3ecc950f" containerName="sbdb" Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.476052 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="66ed4999-426b-4615-bfb3-764a3ecc950f" containerName="sbdb" Oct 03 13:42:52 crc kubenswrapper[4861]: E1003 13:42:52.476062 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66ed4999-426b-4615-bfb3-764a3ecc950f" containerName="kube-rbac-proxy-node" Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.476069 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="66ed4999-426b-4615-bfb3-764a3ecc950f" containerName="kube-rbac-proxy-node" Oct 03 13:42:52 crc kubenswrapper[4861]: E1003 13:42:52.476078 4861 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="66ed4999-426b-4615-bfb3-764a3ecc950f" containerName="ovn-acl-logging" Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.476084 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="66ed4999-426b-4615-bfb3-764a3ecc950f" containerName="ovn-acl-logging" Oct 03 13:42:52 crc kubenswrapper[4861]: E1003 13:42:52.476098 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66ed4999-426b-4615-bfb3-764a3ecc950f" containerName="nbdb" Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.476105 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="66ed4999-426b-4615-bfb3-764a3ecc950f" containerName="nbdb" Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.476282 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="66ed4999-426b-4615-bfb3-764a3ecc950f" containerName="ovnkube-controller" Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.476296 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="66ed4999-426b-4615-bfb3-764a3ecc950f" containerName="northd" Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.476329 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="66ed4999-426b-4615-bfb3-764a3ecc950f" containerName="nbdb" Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.476341 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="66ed4999-426b-4615-bfb3-764a3ecc950f" containerName="kube-rbac-proxy-ovn-metrics" Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.476351 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="66ed4999-426b-4615-bfb3-764a3ecc950f" containerName="sbdb" Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.476361 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="66ed4999-426b-4615-bfb3-764a3ecc950f" containerName="ovn-controller" Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.476368 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="66ed4999-426b-4615-bfb3-764a3ecc950f" containerName="ovnkube-controller" Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.476379 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="66ed4999-426b-4615-bfb3-764a3ecc950f" containerName="ovn-acl-logging" Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.476387 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="66ed4999-426b-4615-bfb3-764a3ecc950f" containerName="ovnkube-controller" Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.476416 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="66ed4999-426b-4615-bfb3-764a3ecc950f" containerName="kube-rbac-proxy-node" Oct 03 13:42:52 crc kubenswrapper[4861]: E1003 13:42:52.476575 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66ed4999-426b-4615-bfb3-764a3ecc950f" containerName="ovnkube-controller" Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.476584 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="66ed4999-426b-4615-bfb3-764a3ecc950f" containerName="ovnkube-controller" Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.476705 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="66ed4999-426b-4615-bfb3-764a3ecc950f" containerName="ovnkube-controller" Oct 03 13:42:52 crc kubenswrapper[4861]: E1003 13:42:52.476895 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66ed4999-426b-4615-bfb3-764a3ecc950f" containerName="ovnkube-controller" Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.476906 4861 
state_mem.go:107] "Deleted CPUSet assignment" podUID="66ed4999-426b-4615-bfb3-764a3ecc950f" containerName="ovnkube-controller" Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.479050 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-ts4sc" Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.551812 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/20a66b6e-e251-4052-a494-233b917d0883-host-run-netns\") pod \"ovnkube-node-ts4sc\" (UID: \"20a66b6e-e251-4052-a494-233b917d0883\") " pod="openshift-ovn-kubernetes/ovnkube-node-ts4sc" Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.551866 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/20a66b6e-e251-4052-a494-233b917d0883-run-ovn\") pod \"ovnkube-node-ts4sc\" (UID: \"20a66b6e-e251-4052-a494-233b917d0883\") " pod="openshift-ovn-kubernetes/ovnkube-node-ts4sc" Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.551889 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/20a66b6e-e251-4052-a494-233b917d0883-ovnkube-config\") pod \"ovnkube-node-ts4sc\" (UID: \"20a66b6e-e251-4052-a494-233b917d0883\") " pod="openshift-ovn-kubernetes/ovnkube-node-ts4sc" Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.551908 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/20a66b6e-e251-4052-a494-233b917d0883-var-lib-openvswitch\") pod \"ovnkube-node-ts4sc\" (UID: \"20a66b6e-e251-4052-a494-233b917d0883\") " pod="openshift-ovn-kubernetes/ovnkube-node-ts4sc" Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.552021 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/20a66b6e-e251-4052-a494-233b917d0883-etc-openvswitch\") pod \"ovnkube-node-ts4sc\" (UID: \"20a66b6e-e251-4052-a494-233b917d0883\") " pod="openshift-ovn-kubernetes/ovnkube-node-ts4sc" Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.552085 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/20a66b6e-e251-4052-a494-233b917d0883-run-systemd\") pod \"ovnkube-node-ts4sc\" (UID: \"20a66b6e-e251-4052-a494-233b917d0883\") " pod="openshift-ovn-kubernetes/ovnkube-node-ts4sc" Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.552129 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/20a66b6e-e251-4052-a494-233b917d0883-host-slash\") pod \"ovnkube-node-ts4sc\" (UID: \"20a66b6e-e251-4052-a494-233b917d0883\") " pod="openshift-ovn-kubernetes/ovnkube-node-ts4sc" Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.552174 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/20a66b6e-e251-4052-a494-233b917d0883-ovn-node-metrics-cert\") pod \"ovnkube-node-ts4sc\" (UID: \"20a66b6e-e251-4052-a494-233b917d0883\") " pod="openshift-ovn-kubernetes/ovnkube-node-ts4sc" Oct 03 13:42:52 crc 
kubenswrapper[4861]: I1003 13:42:52.552292 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/20a66b6e-e251-4052-a494-233b917d0883-host-run-ovn-kubernetes\") pod \"ovnkube-node-ts4sc\" (UID: \"20a66b6e-e251-4052-a494-233b917d0883\") " pod="openshift-ovn-kubernetes/ovnkube-node-ts4sc" Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.552337 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/20a66b6e-e251-4052-a494-233b917d0883-systemd-units\") pod \"ovnkube-node-ts4sc\" (UID: \"20a66b6e-e251-4052-a494-233b917d0883\") " pod="openshift-ovn-kubernetes/ovnkube-node-ts4sc" Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.552375 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/20a66b6e-e251-4052-a494-233b917d0883-env-overrides\") pod \"ovnkube-node-ts4sc\" (UID: \"20a66b6e-e251-4052-a494-233b917d0883\") " pod="openshift-ovn-kubernetes/ovnkube-node-ts4sc" Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.552397 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/20a66b6e-e251-4052-a494-233b917d0883-ovnkube-script-lib\") pod \"ovnkube-node-ts4sc\" (UID: \"20a66b6e-e251-4052-a494-233b917d0883\") " pod="openshift-ovn-kubernetes/ovnkube-node-ts4sc" Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.552459 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/20a66b6e-e251-4052-a494-233b917d0883-run-openvswitch\") pod \"ovnkube-node-ts4sc\" (UID: \"20a66b6e-e251-4052-a494-233b917d0883\") " pod="openshift-ovn-kubernetes/ovnkube-node-ts4sc" Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.552487 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/20a66b6e-e251-4052-a494-233b917d0883-log-socket\") pod \"ovnkube-node-ts4sc\" (UID: \"20a66b6e-e251-4052-a494-233b917d0883\") " pod="openshift-ovn-kubernetes/ovnkube-node-ts4sc" Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.552517 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/20a66b6e-e251-4052-a494-233b917d0883-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-ts4sc\" (UID: \"20a66b6e-e251-4052-a494-233b917d0883\") " pod="openshift-ovn-kubernetes/ovnkube-node-ts4sc" Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.552543 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/20a66b6e-e251-4052-a494-233b917d0883-host-kubelet\") pod \"ovnkube-node-ts4sc\" (UID: \"20a66b6e-e251-4052-a494-233b917d0883\") " pod="openshift-ovn-kubernetes/ovnkube-node-ts4sc" Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.552557 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/20a66b6e-e251-4052-a494-233b917d0883-host-cni-netd\") pod 
\"ovnkube-node-ts4sc\" (UID: \"20a66b6e-e251-4052-a494-233b917d0883\") " pod="openshift-ovn-kubernetes/ovnkube-node-ts4sc" Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.552580 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/20a66b6e-e251-4052-a494-233b917d0883-node-log\") pod \"ovnkube-node-ts4sc\" (UID: \"20a66b6e-e251-4052-a494-233b917d0883\") " pod="openshift-ovn-kubernetes/ovnkube-node-ts4sc" Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.552594 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mhqzt\" (UniqueName: \"kubernetes.io/projected/20a66b6e-e251-4052-a494-233b917d0883-kube-api-access-mhqzt\") pod \"ovnkube-node-ts4sc\" (UID: \"20a66b6e-e251-4052-a494-233b917d0883\") " pod="openshift-ovn-kubernetes/ovnkube-node-ts4sc" Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.552609 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/20a66b6e-e251-4052-a494-233b917d0883-host-cni-bin\") pod \"ovnkube-node-ts4sc\" (UID: \"20a66b6e-e251-4052-a494-233b917d0883\") " pod="openshift-ovn-kubernetes/ovnkube-node-ts4sc" Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.552797 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nwnjl\" (UniqueName: \"kubernetes.io/projected/66ed4999-426b-4615-bfb3-764a3ecc950f-kube-api-access-nwnjl\") on node \"crc\" DevicePath \"\"" Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.552848 4861 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/66ed4999-426b-4615-bfb3-764a3ecc950f-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.552867 4861 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/66ed4999-426b-4615-bfb3-764a3ecc950f-run-openvswitch\") on node \"crc\" DevicePath \"\"" Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.552881 4861 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/66ed4999-426b-4615-bfb3-764a3ecc950f-host-slash\") on node \"crc\" DevicePath \"\"" Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.552895 4861 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/66ed4999-426b-4615-bfb3-764a3ecc950f-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.552908 4861 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/66ed4999-426b-4615-bfb3-764a3ecc950f-env-overrides\") on node \"crc\" DevicePath \"\"" Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.552922 4861 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/66ed4999-426b-4615-bfb3-764a3ecc950f-run-ovn\") on node \"crc\" DevicePath \"\"" Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.552936 4861 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/66ed4999-426b-4615-bfb3-764a3ecc950f-host-cni-netd\") on node \"crc\" DevicePath \"\"" Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.552949 
4861 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/66ed4999-426b-4615-bfb3-764a3ecc950f-systemd-units\") on node \"crc\" DevicePath \"\"" Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.552961 4861 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/66ed4999-426b-4615-bfb3-764a3ecc950f-log-socket\") on node \"crc\" DevicePath \"\"" Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.552972 4861 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/66ed4999-426b-4615-bfb3-764a3ecc950f-host-run-netns\") on node \"crc\" DevicePath \"\"" Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.552983 4861 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/66ed4999-426b-4615-bfb3-764a3ecc950f-node-log\") on node \"crc\" DevicePath \"\"" Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.552996 4861 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/66ed4999-426b-4615-bfb3-764a3ecc950f-host-cni-bin\") on node \"crc\" DevicePath \"\"" Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.553008 4861 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/66ed4999-426b-4615-bfb3-764a3ecc950f-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.553020 4861 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/66ed4999-426b-4615-bfb3-764a3ecc950f-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.553033 4861 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/66ed4999-426b-4615-bfb3-764a3ecc950f-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.553047 4861 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/66ed4999-426b-4615-bfb3-764a3ecc950f-ovnkube-config\") on node \"crc\" DevicePath \"\"" Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.553059 4861 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/66ed4999-426b-4615-bfb3-764a3ecc950f-host-kubelet\") on node \"crc\" DevicePath \"\"" Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.553070 4861 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/66ed4999-426b-4615-bfb3-764a3ecc950f-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.553081 4861 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/66ed4999-426b-4615-bfb3-764a3ecc950f-run-systemd\") on node \"crc\" DevicePath \"\"" Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.653887 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/20a66b6e-e251-4052-a494-233b917d0883-etc-openvswitch\") pod \"ovnkube-node-ts4sc\" (UID: \"20a66b6e-e251-4052-a494-233b917d0883\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-ts4sc" Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.653948 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/20a66b6e-e251-4052-a494-233b917d0883-run-systemd\") pod \"ovnkube-node-ts4sc\" (UID: \"20a66b6e-e251-4052-a494-233b917d0883\") " pod="openshift-ovn-kubernetes/ovnkube-node-ts4sc" Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.653975 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/20a66b6e-e251-4052-a494-233b917d0883-host-slash\") pod \"ovnkube-node-ts4sc\" (UID: \"20a66b6e-e251-4052-a494-233b917d0883\") " pod="openshift-ovn-kubernetes/ovnkube-node-ts4sc" Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.654001 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/20a66b6e-e251-4052-a494-233b917d0883-ovn-node-metrics-cert\") pod \"ovnkube-node-ts4sc\" (UID: \"20a66b6e-e251-4052-a494-233b917d0883\") " pod="openshift-ovn-kubernetes/ovnkube-node-ts4sc" Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.654039 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/20a66b6e-e251-4052-a494-233b917d0883-host-run-ovn-kubernetes\") pod \"ovnkube-node-ts4sc\" (UID: \"20a66b6e-e251-4052-a494-233b917d0883\") " pod="openshift-ovn-kubernetes/ovnkube-node-ts4sc" Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.654060 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/20a66b6e-e251-4052-a494-233b917d0883-systemd-units\") pod \"ovnkube-node-ts4sc\" (UID: \"20a66b6e-e251-4052-a494-233b917d0883\") " pod="openshift-ovn-kubernetes/ovnkube-node-ts4sc" Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.654078 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/20a66b6e-e251-4052-a494-233b917d0883-ovnkube-script-lib\") pod \"ovnkube-node-ts4sc\" (UID: \"20a66b6e-e251-4052-a494-233b917d0883\") " pod="openshift-ovn-kubernetes/ovnkube-node-ts4sc" Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.654092 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/20a66b6e-e251-4052-a494-233b917d0883-env-overrides\") pod \"ovnkube-node-ts4sc\" (UID: \"20a66b6e-e251-4052-a494-233b917d0883\") " pod="openshift-ovn-kubernetes/ovnkube-node-ts4sc" Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.654109 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/20a66b6e-e251-4052-a494-233b917d0883-run-openvswitch\") pod \"ovnkube-node-ts4sc\" (UID: \"20a66b6e-e251-4052-a494-233b917d0883\") " pod="openshift-ovn-kubernetes/ovnkube-node-ts4sc" Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.654126 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/20a66b6e-e251-4052-a494-233b917d0883-log-socket\") pod \"ovnkube-node-ts4sc\" (UID: \"20a66b6e-e251-4052-a494-233b917d0883\") " pod="openshift-ovn-kubernetes/ovnkube-node-ts4sc" Oct 03 13:42:52 crc 
kubenswrapper[4861]: I1003 13:42:52.654145 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/20a66b6e-e251-4052-a494-233b917d0883-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-ts4sc\" (UID: \"20a66b6e-e251-4052-a494-233b917d0883\") " pod="openshift-ovn-kubernetes/ovnkube-node-ts4sc" Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.654205 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/20a66b6e-e251-4052-a494-233b917d0883-host-kubelet\") pod \"ovnkube-node-ts4sc\" (UID: \"20a66b6e-e251-4052-a494-233b917d0883\") " pod="openshift-ovn-kubernetes/ovnkube-node-ts4sc" Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.654243 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/20a66b6e-e251-4052-a494-233b917d0883-host-cni-netd\") pod \"ovnkube-node-ts4sc\" (UID: \"20a66b6e-e251-4052-a494-233b917d0883\") " pod="openshift-ovn-kubernetes/ovnkube-node-ts4sc" Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.654266 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mhqzt\" (UniqueName: \"kubernetes.io/projected/20a66b6e-e251-4052-a494-233b917d0883-kube-api-access-mhqzt\") pod \"ovnkube-node-ts4sc\" (UID: \"20a66b6e-e251-4052-a494-233b917d0883\") " pod="openshift-ovn-kubernetes/ovnkube-node-ts4sc" Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.654294 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/20a66b6e-e251-4052-a494-233b917d0883-node-log\") pod \"ovnkube-node-ts4sc\" (UID: \"20a66b6e-e251-4052-a494-233b917d0883\") " pod="openshift-ovn-kubernetes/ovnkube-node-ts4sc" Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.654318 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/20a66b6e-e251-4052-a494-233b917d0883-host-cni-bin\") pod \"ovnkube-node-ts4sc\" (UID: \"20a66b6e-e251-4052-a494-233b917d0883\") " pod="openshift-ovn-kubernetes/ovnkube-node-ts4sc" Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.654339 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/20a66b6e-e251-4052-a494-233b917d0883-host-run-netns\") pod \"ovnkube-node-ts4sc\" (UID: \"20a66b6e-e251-4052-a494-233b917d0883\") " pod="openshift-ovn-kubernetes/ovnkube-node-ts4sc" Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.654361 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/20a66b6e-e251-4052-a494-233b917d0883-run-ovn\") pod \"ovnkube-node-ts4sc\" (UID: \"20a66b6e-e251-4052-a494-233b917d0883\") " pod="openshift-ovn-kubernetes/ovnkube-node-ts4sc" Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.654391 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/20a66b6e-e251-4052-a494-233b917d0883-ovnkube-config\") pod \"ovnkube-node-ts4sc\" (UID: \"20a66b6e-e251-4052-a494-233b917d0883\") " pod="openshift-ovn-kubernetes/ovnkube-node-ts4sc" Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.654413 4861 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/20a66b6e-e251-4052-a494-233b917d0883-var-lib-openvswitch\") pod \"ovnkube-node-ts4sc\" (UID: \"20a66b6e-e251-4052-a494-233b917d0883\") " pod="openshift-ovn-kubernetes/ovnkube-node-ts4sc" Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.654493 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/20a66b6e-e251-4052-a494-233b917d0883-var-lib-openvswitch\") pod \"ovnkube-node-ts4sc\" (UID: \"20a66b6e-e251-4052-a494-233b917d0883\") " pod="openshift-ovn-kubernetes/ovnkube-node-ts4sc" Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.654540 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/20a66b6e-e251-4052-a494-233b917d0883-etc-openvswitch\") pod \"ovnkube-node-ts4sc\" (UID: \"20a66b6e-e251-4052-a494-233b917d0883\") " pod="openshift-ovn-kubernetes/ovnkube-node-ts4sc" Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.654568 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/20a66b6e-e251-4052-a494-233b917d0883-run-systemd\") pod \"ovnkube-node-ts4sc\" (UID: \"20a66b6e-e251-4052-a494-233b917d0883\") " pod="openshift-ovn-kubernetes/ovnkube-node-ts4sc" Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.654594 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/20a66b6e-e251-4052-a494-233b917d0883-host-slash\") pod \"ovnkube-node-ts4sc\" (UID: \"20a66b6e-e251-4052-a494-233b917d0883\") " pod="openshift-ovn-kubernetes/ovnkube-node-ts4sc" Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.655135 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/20a66b6e-e251-4052-a494-233b917d0883-log-socket\") pod \"ovnkube-node-ts4sc\" (UID: \"20a66b6e-e251-4052-a494-233b917d0883\") " pod="openshift-ovn-kubernetes/ovnkube-node-ts4sc" Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.655138 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/20a66b6e-e251-4052-a494-233b917d0883-host-cni-bin\") pod \"ovnkube-node-ts4sc\" (UID: \"20a66b6e-e251-4052-a494-233b917d0883\") " pod="openshift-ovn-kubernetes/ovnkube-node-ts4sc" Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.655177 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/20a66b6e-e251-4052-a494-233b917d0883-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-ts4sc\" (UID: \"20a66b6e-e251-4052-a494-233b917d0883\") " pod="openshift-ovn-kubernetes/ovnkube-node-ts4sc" Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.655183 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/20a66b6e-e251-4052-a494-233b917d0883-run-openvswitch\") pod \"ovnkube-node-ts4sc\" (UID: \"20a66b6e-e251-4052-a494-233b917d0883\") " pod="openshift-ovn-kubernetes/ovnkube-node-ts4sc" Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.655255 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: 
\"kubernetes.io/host-path/20a66b6e-e251-4052-a494-233b917d0883-node-log\") pod \"ovnkube-node-ts4sc\" (UID: \"20a66b6e-e251-4052-a494-233b917d0883\") " pod="openshift-ovn-kubernetes/ovnkube-node-ts4sc" Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.655264 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/20a66b6e-e251-4052-a494-233b917d0883-systemd-units\") pod \"ovnkube-node-ts4sc\" (UID: \"20a66b6e-e251-4052-a494-233b917d0883\") " pod="openshift-ovn-kubernetes/ovnkube-node-ts4sc" Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.655346 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/20a66b6e-e251-4052-a494-233b917d0883-host-cni-netd\") pod \"ovnkube-node-ts4sc\" (UID: \"20a66b6e-e251-4052-a494-233b917d0883\") " pod="openshift-ovn-kubernetes/ovnkube-node-ts4sc" Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.655519 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/20a66b6e-e251-4052-a494-233b917d0883-host-run-ovn-kubernetes\") pod \"ovnkube-node-ts4sc\" (UID: \"20a66b6e-e251-4052-a494-233b917d0883\") " pod="openshift-ovn-kubernetes/ovnkube-node-ts4sc" Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.655563 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/20a66b6e-e251-4052-a494-233b917d0883-host-run-netns\") pod \"ovnkube-node-ts4sc\" (UID: \"20a66b6e-e251-4052-a494-233b917d0883\") " pod="openshift-ovn-kubernetes/ovnkube-node-ts4sc" Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.655743 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/20a66b6e-e251-4052-a494-233b917d0883-run-ovn\") pod \"ovnkube-node-ts4sc\" (UID: \"20a66b6e-e251-4052-a494-233b917d0883\") " pod="openshift-ovn-kubernetes/ovnkube-node-ts4sc" Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.655774 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/20a66b6e-e251-4052-a494-233b917d0883-env-overrides\") pod \"ovnkube-node-ts4sc\" (UID: \"20a66b6e-e251-4052-a494-233b917d0883\") " pod="openshift-ovn-kubernetes/ovnkube-node-ts4sc" Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.655804 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/20a66b6e-e251-4052-a494-233b917d0883-ovnkube-config\") pod \"ovnkube-node-ts4sc\" (UID: \"20a66b6e-e251-4052-a494-233b917d0883\") " pod="openshift-ovn-kubernetes/ovnkube-node-ts4sc" Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.655837 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/20a66b6e-e251-4052-a494-233b917d0883-host-kubelet\") pod \"ovnkube-node-ts4sc\" (UID: \"20a66b6e-e251-4052-a494-233b917d0883\") " pod="openshift-ovn-kubernetes/ovnkube-node-ts4sc" Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.656146 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/20a66b6e-e251-4052-a494-233b917d0883-ovnkube-script-lib\") pod \"ovnkube-node-ts4sc\" (UID: \"20a66b6e-e251-4052-a494-233b917d0883\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-ts4sc" Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.658378 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/20a66b6e-e251-4052-a494-233b917d0883-ovn-node-metrics-cert\") pod \"ovnkube-node-ts4sc\" (UID: \"20a66b6e-e251-4052-a494-233b917d0883\") " pod="openshift-ovn-kubernetes/ovnkube-node-ts4sc" Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.672515 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mhqzt\" (UniqueName: \"kubernetes.io/projected/20a66b6e-e251-4052-a494-233b917d0883-kube-api-access-mhqzt\") pod \"ovnkube-node-ts4sc\" (UID: \"20a66b6e-e251-4052-a494-233b917d0883\") " pod="openshift-ovn-kubernetes/ovnkube-node-ts4sc" Oct 03 13:42:52 crc kubenswrapper[4861]: I1003 13:42:52.801808 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-ts4sc" Oct 03 13:42:52 crc kubenswrapper[4861]: W1003 13:42:52.819860 4861 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod20a66b6e_e251_4052_a494_233b917d0883.slice/crio-b959cc56ff88a44db04cc17856651d2d94ebfd5caea91c26cce0fa6fb2d251dc WatchSource:0}: Error finding container b959cc56ff88a44db04cc17856651d2d94ebfd5caea91c26cce0fa6fb2d251dc: Status 404 returned error can't find the container with id b959cc56ff88a44db04cc17856651d2d94ebfd5caea91c26cce0fa6fb2d251dc Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.024085 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-jwgvx_f714b7db-082f-4c2c-8239-ba5df6986c13/kube-multus/1.log" Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.025281 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-jwgvx_f714b7db-082f-4c2c-8239-ba5df6986c13/kube-multus/0.log" Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.025323 4861 generic.go:334] "Generic (PLEG): container finished" podID="f714b7db-082f-4c2c-8239-ba5df6986c13" containerID="428a5d17c11b15a2616655e5d2597ca82ffecd4eaecc97e33c3143839af4fda0" exitCode=2 Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.025384 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-jwgvx" event={"ID":"f714b7db-082f-4c2c-8239-ba5df6986c13","Type":"ContainerDied","Data":"428a5d17c11b15a2616655e5d2597ca82ffecd4eaecc97e33c3143839af4fda0"} Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.025425 4861 scope.go:117] "RemoveContainer" containerID="163f5cadc8f9cf8082434639e5dd0dfae5cefc359dbf462b616e4dde476a309f" Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.025919 4861 scope.go:117] "RemoveContainer" containerID="428a5d17c11b15a2616655e5d2597ca82ffecd4eaecc97e33c3143839af4fda0" Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.028917 4861 generic.go:334] "Generic (PLEG): container finished" podID="20a66b6e-e251-4052-a494-233b917d0883" containerID="8ce9f89f3748f5d72037f4eab515315e57ab8334f508da107bacea5ca1f30c7e" exitCode=0 Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.028972 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ts4sc" event={"ID":"20a66b6e-e251-4052-a494-233b917d0883","Type":"ContainerDied","Data":"8ce9f89f3748f5d72037f4eab515315e57ab8334f508da107bacea5ca1f30c7e"} Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.028995 4861 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ts4sc" event={"ID":"20a66b6e-e251-4052-a494-233b917d0883","Type":"ContainerStarted","Data":"b959cc56ff88a44db04cc17856651d2d94ebfd5caea91c26cce0fa6fb2d251dc"} Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.034104 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5twn4_66ed4999-426b-4615-bfb3-764a3ecc950f/ovnkube-controller/2.log" Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.059186 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5twn4_66ed4999-426b-4615-bfb3-764a3ecc950f/ovn-acl-logging/0.log" Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.065319 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5twn4_66ed4999-426b-4615-bfb3-764a3ecc950f/ovn-controller/0.log" Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.066897 4861 generic.go:334] "Generic (PLEG): container finished" podID="66ed4999-426b-4615-bfb3-764a3ecc950f" containerID="c4d5354382b50370a550e49f6c3deafd2a393871bb8b60c462fecdc244249ea0" exitCode=0 Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.066936 4861 generic.go:334] "Generic (PLEG): container finished" podID="66ed4999-426b-4615-bfb3-764a3ecc950f" containerID="f2ee45e3f91ddde95e7bdf26aed6afb1d69eb3dbbdad136c66a51a2a3a325984" exitCode=0 Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.066945 4861 generic.go:334] "Generic (PLEG): container finished" podID="66ed4999-426b-4615-bfb3-764a3ecc950f" containerID="3dadd381cc9fb8f216611723c7f3113272fdd37e424ab087ae2b516b1282c724" exitCode=0 Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.066978 4861 generic.go:334] "Generic (PLEG): container finished" podID="66ed4999-426b-4615-bfb3-764a3ecc950f" containerID="5c11d2168c2a8a146f93a9048c50a0a7da936f36039a924e2e0c946f571ac6d2" exitCode=0 Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.066988 4861 generic.go:334] "Generic (PLEG): container finished" podID="66ed4999-426b-4615-bfb3-764a3ecc950f" containerID="1d7403a686a403bd13b8c7040a8d54e47ea882e532dbde51ff960cf2b4a7dc84" exitCode=0 Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.066996 4861 generic.go:334] "Generic (PLEG): container finished" podID="66ed4999-426b-4615-bfb3-764a3ecc950f" containerID="a6dd78f8d0e99d19d90df2672bd0a66e48195ab147e3821b110c5b9b13fff935" exitCode=0 Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.067004 4861 generic.go:334] "Generic (PLEG): container finished" podID="66ed4999-426b-4615-bfb3-764a3ecc950f" containerID="17d9e0e142062e93034c5f825e1229664112d38443d5843713cac6e077737c48" exitCode=143 Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.067013 4861 generic.go:334] "Generic (PLEG): container finished" podID="66ed4999-426b-4615-bfb3-764a3ecc950f" containerID="a49ed8048fd561e10cc87dfa9b39d3ff2123f2cc65f9b4402bba6bf01d161213" exitCode=143 Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.067038 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" event={"ID":"66ed4999-426b-4615-bfb3-764a3ecc950f","Type":"ContainerDied","Data":"c4d5354382b50370a550e49f6c3deafd2a393871bb8b60c462fecdc244249ea0"} Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.067073 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" 
event={"ID":"66ed4999-426b-4615-bfb3-764a3ecc950f","Type":"ContainerDied","Data":"f2ee45e3f91ddde95e7bdf26aed6afb1d69eb3dbbdad136c66a51a2a3a325984"} Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.067091 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" event={"ID":"66ed4999-426b-4615-bfb3-764a3ecc950f","Type":"ContainerDied","Data":"3dadd381cc9fb8f216611723c7f3113272fdd37e424ab087ae2b516b1282c724"} Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.067108 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" event={"ID":"66ed4999-426b-4615-bfb3-764a3ecc950f","Type":"ContainerDied","Data":"5c11d2168c2a8a146f93a9048c50a0a7da936f36039a924e2e0c946f571ac6d2"} Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.067125 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" event={"ID":"66ed4999-426b-4615-bfb3-764a3ecc950f","Type":"ContainerDied","Data":"1d7403a686a403bd13b8c7040a8d54e47ea882e532dbde51ff960cf2b4a7dc84"} Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.067138 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" event={"ID":"66ed4999-426b-4615-bfb3-764a3ecc950f","Type":"ContainerDied","Data":"a6dd78f8d0e99d19d90df2672bd0a66e48195ab147e3821b110c5b9b13fff935"} Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.067151 4861 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"efb31a65c06544254430413ae43161716fc307b8a95a6f42b0e5a085136f832a"} Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.067167 4861 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f2ee45e3f91ddde95e7bdf26aed6afb1d69eb3dbbdad136c66a51a2a3a325984"} Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.067176 4861 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3dadd381cc9fb8f216611723c7f3113272fdd37e424ab087ae2b516b1282c724"} Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.067184 4861 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"5c11d2168c2a8a146f93a9048c50a0a7da936f36039a924e2e0c946f571ac6d2"} Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.067193 4861 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"1d7403a686a403bd13b8c7040a8d54e47ea882e532dbde51ff960cf2b4a7dc84"} Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.067202 4861 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a6dd78f8d0e99d19d90df2672bd0a66e48195ab147e3821b110c5b9b13fff935"} Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.067210 4861 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"17d9e0e142062e93034c5f825e1229664112d38443d5843713cac6e077737c48"} Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.067218 4861 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a49ed8048fd561e10cc87dfa9b39d3ff2123f2cc65f9b4402bba6bf01d161213"} Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.067245 4861 pod_container_deletor.go:114] "Failed to issue the request to remove 
container" containerID={"Type":"cri-o","ID":"85920cc6705b475966ceb17c3776353ec6cd31730aab339b4e5469034c49264e"} Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.067259 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" event={"ID":"66ed4999-426b-4615-bfb3-764a3ecc950f","Type":"ContainerDied","Data":"17d9e0e142062e93034c5f825e1229664112d38443d5843713cac6e077737c48"} Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.067274 4861 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c4d5354382b50370a550e49f6c3deafd2a393871bb8b60c462fecdc244249ea0"} Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.067284 4861 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"efb31a65c06544254430413ae43161716fc307b8a95a6f42b0e5a085136f832a"} Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.067293 4861 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f2ee45e3f91ddde95e7bdf26aed6afb1d69eb3dbbdad136c66a51a2a3a325984"} Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.067301 4861 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3dadd381cc9fb8f216611723c7f3113272fdd37e424ab087ae2b516b1282c724"} Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.067311 4861 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"5c11d2168c2a8a146f93a9048c50a0a7da936f36039a924e2e0c946f571ac6d2"} Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.067319 4861 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"1d7403a686a403bd13b8c7040a8d54e47ea882e532dbde51ff960cf2b4a7dc84"} Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.067327 4861 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a6dd78f8d0e99d19d90df2672bd0a66e48195ab147e3821b110c5b9b13fff935"} Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.067359 4861 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"17d9e0e142062e93034c5f825e1229664112d38443d5843713cac6e077737c48"} Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.067367 4861 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a49ed8048fd561e10cc87dfa9b39d3ff2123f2cc65f9b4402bba6bf01d161213"} Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.067375 4861 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"85920cc6705b475966ceb17c3776353ec6cd31730aab339b4e5469034c49264e"} Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.067385 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" event={"ID":"66ed4999-426b-4615-bfb3-764a3ecc950f","Type":"ContainerDied","Data":"a49ed8048fd561e10cc87dfa9b39d3ff2123f2cc65f9b4402bba6bf01d161213"} Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.067399 4861 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c4d5354382b50370a550e49f6c3deafd2a393871bb8b60c462fecdc244249ea0"} Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.067407 4861 
pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"efb31a65c06544254430413ae43161716fc307b8a95a6f42b0e5a085136f832a"} Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.067415 4861 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f2ee45e3f91ddde95e7bdf26aed6afb1d69eb3dbbdad136c66a51a2a3a325984"} Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.067422 4861 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3dadd381cc9fb8f216611723c7f3113272fdd37e424ab087ae2b516b1282c724"} Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.067430 4861 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"5c11d2168c2a8a146f93a9048c50a0a7da936f36039a924e2e0c946f571ac6d2"} Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.067441 4861 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"1d7403a686a403bd13b8c7040a8d54e47ea882e532dbde51ff960cf2b4a7dc84"} Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.067453 4861 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a6dd78f8d0e99d19d90df2672bd0a66e48195ab147e3821b110c5b9b13fff935"} Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.067461 4861 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"17d9e0e142062e93034c5f825e1229664112d38443d5843713cac6e077737c48"} Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.067469 4861 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a49ed8048fd561e10cc87dfa9b39d3ff2123f2cc65f9b4402bba6bf01d161213"} Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.067477 4861 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"85920cc6705b475966ceb17c3776353ec6cd31730aab339b4e5469034c49264e"} Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.067487 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" event={"ID":"66ed4999-426b-4615-bfb3-764a3ecc950f","Type":"ContainerDied","Data":"2837ee081d61d6690d27de9787d553e2e97701af87910f6df2680dee8faa326c"} Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.067498 4861 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c4d5354382b50370a550e49f6c3deafd2a393871bb8b60c462fecdc244249ea0"} Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.067506 4861 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"efb31a65c06544254430413ae43161716fc307b8a95a6f42b0e5a085136f832a"} Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.067513 4861 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f2ee45e3f91ddde95e7bdf26aed6afb1d69eb3dbbdad136c66a51a2a3a325984"} Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.067521 4861 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3dadd381cc9fb8f216611723c7f3113272fdd37e424ab087ae2b516b1282c724"} Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.067527 4861 
pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"5c11d2168c2a8a146f93a9048c50a0a7da936f36039a924e2e0c946f571ac6d2"} Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.067534 4861 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"1d7403a686a403bd13b8c7040a8d54e47ea882e532dbde51ff960cf2b4a7dc84"} Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.067541 4861 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a6dd78f8d0e99d19d90df2672bd0a66e48195ab147e3821b110c5b9b13fff935"} Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.067547 4861 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"17d9e0e142062e93034c5f825e1229664112d38443d5843713cac6e077737c48"} Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.067554 4861 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a49ed8048fd561e10cc87dfa9b39d3ff2123f2cc65f9b4402bba6bf01d161213"} Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.067561 4861 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"85920cc6705b475966ceb17c3776353ec6cd31730aab339b4e5469034c49264e"} Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.067579 4861 scope.go:117] "RemoveContainer" containerID="c4d5354382b50370a550e49f6c3deafd2a393871bb8b60c462fecdc244249ea0" Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.067742 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-5twn4" Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.110495 4861 scope.go:117] "RemoveContainer" containerID="efb31a65c06544254430413ae43161716fc307b8a95a6f42b0e5a085136f832a" Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.128130 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-5twn4"] Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.134087 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-5twn4"] Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.149495 4861 scope.go:117] "RemoveContainer" containerID="f2ee45e3f91ddde95e7bdf26aed6afb1d69eb3dbbdad136c66a51a2a3a325984" Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.162951 4861 scope.go:117] "RemoveContainer" containerID="3dadd381cc9fb8f216611723c7f3113272fdd37e424ab087ae2b516b1282c724" Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.185106 4861 scope.go:117] "RemoveContainer" containerID="5c11d2168c2a8a146f93a9048c50a0a7da936f36039a924e2e0c946f571ac6d2" Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.234933 4861 scope.go:117] "RemoveContainer" containerID="1d7403a686a403bd13b8c7040a8d54e47ea882e532dbde51ff960cf2b4a7dc84" Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.262211 4861 scope.go:117] "RemoveContainer" containerID="a6dd78f8d0e99d19d90df2672bd0a66e48195ab147e3821b110c5b9b13fff935" Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.276700 4861 scope.go:117] "RemoveContainer" containerID="17d9e0e142062e93034c5f825e1229664112d38443d5843713cac6e077737c48" Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.295675 4861 scope.go:117] "RemoveContainer" 
containerID="a49ed8048fd561e10cc87dfa9b39d3ff2123f2cc65f9b4402bba6bf01d161213" Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.314112 4861 scope.go:117] "RemoveContainer" containerID="85920cc6705b475966ceb17c3776353ec6cd31730aab339b4e5469034c49264e" Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.330991 4861 scope.go:117] "RemoveContainer" containerID="c4d5354382b50370a550e49f6c3deafd2a393871bb8b60c462fecdc244249ea0" Oct 03 13:42:53 crc kubenswrapper[4861]: E1003 13:42:53.331628 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c4d5354382b50370a550e49f6c3deafd2a393871bb8b60c462fecdc244249ea0\": container with ID starting with c4d5354382b50370a550e49f6c3deafd2a393871bb8b60c462fecdc244249ea0 not found: ID does not exist" containerID="c4d5354382b50370a550e49f6c3deafd2a393871bb8b60c462fecdc244249ea0" Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.331670 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c4d5354382b50370a550e49f6c3deafd2a393871bb8b60c462fecdc244249ea0"} err="failed to get container status \"c4d5354382b50370a550e49f6c3deafd2a393871bb8b60c462fecdc244249ea0\": rpc error: code = NotFound desc = could not find container \"c4d5354382b50370a550e49f6c3deafd2a393871bb8b60c462fecdc244249ea0\": container with ID starting with c4d5354382b50370a550e49f6c3deafd2a393871bb8b60c462fecdc244249ea0 not found: ID does not exist" Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.331697 4861 scope.go:117] "RemoveContainer" containerID="efb31a65c06544254430413ae43161716fc307b8a95a6f42b0e5a085136f832a" Oct 03 13:42:53 crc kubenswrapper[4861]: E1003 13:42:53.334444 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"efb31a65c06544254430413ae43161716fc307b8a95a6f42b0e5a085136f832a\": container with ID starting with efb31a65c06544254430413ae43161716fc307b8a95a6f42b0e5a085136f832a not found: ID does not exist" containerID="efb31a65c06544254430413ae43161716fc307b8a95a6f42b0e5a085136f832a" Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.334510 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"efb31a65c06544254430413ae43161716fc307b8a95a6f42b0e5a085136f832a"} err="failed to get container status \"efb31a65c06544254430413ae43161716fc307b8a95a6f42b0e5a085136f832a\": rpc error: code = NotFound desc = could not find container \"efb31a65c06544254430413ae43161716fc307b8a95a6f42b0e5a085136f832a\": container with ID starting with efb31a65c06544254430413ae43161716fc307b8a95a6f42b0e5a085136f832a not found: ID does not exist" Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.334540 4861 scope.go:117] "RemoveContainer" containerID="f2ee45e3f91ddde95e7bdf26aed6afb1d69eb3dbbdad136c66a51a2a3a325984" Oct 03 13:42:53 crc kubenswrapper[4861]: E1003 13:42:53.335048 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f2ee45e3f91ddde95e7bdf26aed6afb1d69eb3dbbdad136c66a51a2a3a325984\": container with ID starting with f2ee45e3f91ddde95e7bdf26aed6afb1d69eb3dbbdad136c66a51a2a3a325984 not found: ID does not exist" containerID="f2ee45e3f91ddde95e7bdf26aed6afb1d69eb3dbbdad136c66a51a2a3a325984" Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.335091 4861 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"f2ee45e3f91ddde95e7bdf26aed6afb1d69eb3dbbdad136c66a51a2a3a325984"} err="failed to get container status \"f2ee45e3f91ddde95e7bdf26aed6afb1d69eb3dbbdad136c66a51a2a3a325984\": rpc error: code = NotFound desc = could not find container \"f2ee45e3f91ddde95e7bdf26aed6afb1d69eb3dbbdad136c66a51a2a3a325984\": container with ID starting with f2ee45e3f91ddde95e7bdf26aed6afb1d69eb3dbbdad136c66a51a2a3a325984 not found: ID does not exist" Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.335120 4861 scope.go:117] "RemoveContainer" containerID="3dadd381cc9fb8f216611723c7f3113272fdd37e424ab087ae2b516b1282c724" Oct 03 13:42:53 crc kubenswrapper[4861]: E1003 13:42:53.335592 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3dadd381cc9fb8f216611723c7f3113272fdd37e424ab087ae2b516b1282c724\": container with ID starting with 3dadd381cc9fb8f216611723c7f3113272fdd37e424ab087ae2b516b1282c724 not found: ID does not exist" containerID="3dadd381cc9fb8f216611723c7f3113272fdd37e424ab087ae2b516b1282c724" Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.335620 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3dadd381cc9fb8f216611723c7f3113272fdd37e424ab087ae2b516b1282c724"} err="failed to get container status \"3dadd381cc9fb8f216611723c7f3113272fdd37e424ab087ae2b516b1282c724\": rpc error: code = NotFound desc = could not find container \"3dadd381cc9fb8f216611723c7f3113272fdd37e424ab087ae2b516b1282c724\": container with ID starting with 3dadd381cc9fb8f216611723c7f3113272fdd37e424ab087ae2b516b1282c724 not found: ID does not exist" Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.335639 4861 scope.go:117] "RemoveContainer" containerID="5c11d2168c2a8a146f93a9048c50a0a7da936f36039a924e2e0c946f571ac6d2" Oct 03 13:42:53 crc kubenswrapper[4861]: E1003 13:42:53.335955 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5c11d2168c2a8a146f93a9048c50a0a7da936f36039a924e2e0c946f571ac6d2\": container with ID starting with 5c11d2168c2a8a146f93a9048c50a0a7da936f36039a924e2e0c946f571ac6d2 not found: ID does not exist" containerID="5c11d2168c2a8a146f93a9048c50a0a7da936f36039a924e2e0c946f571ac6d2" Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.335981 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5c11d2168c2a8a146f93a9048c50a0a7da936f36039a924e2e0c946f571ac6d2"} err="failed to get container status \"5c11d2168c2a8a146f93a9048c50a0a7da936f36039a924e2e0c946f571ac6d2\": rpc error: code = NotFound desc = could not find container \"5c11d2168c2a8a146f93a9048c50a0a7da936f36039a924e2e0c946f571ac6d2\": container with ID starting with 5c11d2168c2a8a146f93a9048c50a0a7da936f36039a924e2e0c946f571ac6d2 not found: ID does not exist" Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.336000 4861 scope.go:117] "RemoveContainer" containerID="1d7403a686a403bd13b8c7040a8d54e47ea882e532dbde51ff960cf2b4a7dc84" Oct 03 13:42:53 crc kubenswrapper[4861]: E1003 13:42:53.336302 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1d7403a686a403bd13b8c7040a8d54e47ea882e532dbde51ff960cf2b4a7dc84\": container with ID starting with 1d7403a686a403bd13b8c7040a8d54e47ea882e532dbde51ff960cf2b4a7dc84 not found: ID does not exist" 
containerID="1d7403a686a403bd13b8c7040a8d54e47ea882e532dbde51ff960cf2b4a7dc84" Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.336328 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1d7403a686a403bd13b8c7040a8d54e47ea882e532dbde51ff960cf2b4a7dc84"} err="failed to get container status \"1d7403a686a403bd13b8c7040a8d54e47ea882e532dbde51ff960cf2b4a7dc84\": rpc error: code = NotFound desc = could not find container \"1d7403a686a403bd13b8c7040a8d54e47ea882e532dbde51ff960cf2b4a7dc84\": container with ID starting with 1d7403a686a403bd13b8c7040a8d54e47ea882e532dbde51ff960cf2b4a7dc84 not found: ID does not exist" Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.336346 4861 scope.go:117] "RemoveContainer" containerID="a6dd78f8d0e99d19d90df2672bd0a66e48195ab147e3821b110c5b9b13fff935" Oct 03 13:42:53 crc kubenswrapper[4861]: E1003 13:42:53.336599 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a6dd78f8d0e99d19d90df2672bd0a66e48195ab147e3821b110c5b9b13fff935\": container with ID starting with a6dd78f8d0e99d19d90df2672bd0a66e48195ab147e3821b110c5b9b13fff935 not found: ID does not exist" containerID="a6dd78f8d0e99d19d90df2672bd0a66e48195ab147e3821b110c5b9b13fff935" Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.336628 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a6dd78f8d0e99d19d90df2672bd0a66e48195ab147e3821b110c5b9b13fff935"} err="failed to get container status \"a6dd78f8d0e99d19d90df2672bd0a66e48195ab147e3821b110c5b9b13fff935\": rpc error: code = NotFound desc = could not find container \"a6dd78f8d0e99d19d90df2672bd0a66e48195ab147e3821b110c5b9b13fff935\": container with ID starting with a6dd78f8d0e99d19d90df2672bd0a66e48195ab147e3821b110c5b9b13fff935 not found: ID does not exist" Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.336645 4861 scope.go:117] "RemoveContainer" containerID="17d9e0e142062e93034c5f825e1229664112d38443d5843713cac6e077737c48" Oct 03 13:42:53 crc kubenswrapper[4861]: E1003 13:42:53.336860 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"17d9e0e142062e93034c5f825e1229664112d38443d5843713cac6e077737c48\": container with ID starting with 17d9e0e142062e93034c5f825e1229664112d38443d5843713cac6e077737c48 not found: ID does not exist" containerID="17d9e0e142062e93034c5f825e1229664112d38443d5843713cac6e077737c48" Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.336886 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"17d9e0e142062e93034c5f825e1229664112d38443d5843713cac6e077737c48"} err="failed to get container status \"17d9e0e142062e93034c5f825e1229664112d38443d5843713cac6e077737c48\": rpc error: code = NotFound desc = could not find container \"17d9e0e142062e93034c5f825e1229664112d38443d5843713cac6e077737c48\": container with ID starting with 17d9e0e142062e93034c5f825e1229664112d38443d5843713cac6e077737c48 not found: ID does not exist" Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.336910 4861 scope.go:117] "RemoveContainer" containerID="a49ed8048fd561e10cc87dfa9b39d3ff2123f2cc65f9b4402bba6bf01d161213" Oct 03 13:42:53 crc kubenswrapper[4861]: E1003 13:42:53.337113 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"a49ed8048fd561e10cc87dfa9b39d3ff2123f2cc65f9b4402bba6bf01d161213\": container with ID starting with a49ed8048fd561e10cc87dfa9b39d3ff2123f2cc65f9b4402bba6bf01d161213 not found: ID does not exist" containerID="a49ed8048fd561e10cc87dfa9b39d3ff2123f2cc65f9b4402bba6bf01d161213" Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.337140 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a49ed8048fd561e10cc87dfa9b39d3ff2123f2cc65f9b4402bba6bf01d161213"} err="failed to get container status \"a49ed8048fd561e10cc87dfa9b39d3ff2123f2cc65f9b4402bba6bf01d161213\": rpc error: code = NotFound desc = could not find container \"a49ed8048fd561e10cc87dfa9b39d3ff2123f2cc65f9b4402bba6bf01d161213\": container with ID starting with a49ed8048fd561e10cc87dfa9b39d3ff2123f2cc65f9b4402bba6bf01d161213 not found: ID does not exist" Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.337159 4861 scope.go:117] "RemoveContainer" containerID="85920cc6705b475966ceb17c3776353ec6cd31730aab339b4e5469034c49264e" Oct 03 13:42:53 crc kubenswrapper[4861]: E1003 13:42:53.337520 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"85920cc6705b475966ceb17c3776353ec6cd31730aab339b4e5469034c49264e\": container with ID starting with 85920cc6705b475966ceb17c3776353ec6cd31730aab339b4e5469034c49264e not found: ID does not exist" containerID="85920cc6705b475966ceb17c3776353ec6cd31730aab339b4e5469034c49264e" Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.337547 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"85920cc6705b475966ceb17c3776353ec6cd31730aab339b4e5469034c49264e"} err="failed to get container status \"85920cc6705b475966ceb17c3776353ec6cd31730aab339b4e5469034c49264e\": rpc error: code = NotFound desc = could not find container \"85920cc6705b475966ceb17c3776353ec6cd31730aab339b4e5469034c49264e\": container with ID starting with 85920cc6705b475966ceb17c3776353ec6cd31730aab339b4e5469034c49264e not found: ID does not exist" Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.337566 4861 scope.go:117] "RemoveContainer" containerID="c4d5354382b50370a550e49f6c3deafd2a393871bb8b60c462fecdc244249ea0" Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.337958 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c4d5354382b50370a550e49f6c3deafd2a393871bb8b60c462fecdc244249ea0"} err="failed to get container status \"c4d5354382b50370a550e49f6c3deafd2a393871bb8b60c462fecdc244249ea0\": rpc error: code = NotFound desc = could not find container \"c4d5354382b50370a550e49f6c3deafd2a393871bb8b60c462fecdc244249ea0\": container with ID starting with c4d5354382b50370a550e49f6c3deafd2a393871bb8b60c462fecdc244249ea0 not found: ID does not exist" Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.337986 4861 scope.go:117] "RemoveContainer" containerID="efb31a65c06544254430413ae43161716fc307b8a95a6f42b0e5a085136f832a" Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.338241 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"efb31a65c06544254430413ae43161716fc307b8a95a6f42b0e5a085136f832a"} err="failed to get container status \"efb31a65c06544254430413ae43161716fc307b8a95a6f42b0e5a085136f832a\": rpc error: code = NotFound desc = could not find container \"efb31a65c06544254430413ae43161716fc307b8a95a6f42b0e5a085136f832a\": container with ID starting with 
efb31a65c06544254430413ae43161716fc307b8a95a6f42b0e5a085136f832a not found: ID does not exist" Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.338256 4861 scope.go:117] "RemoveContainer" containerID="f2ee45e3f91ddde95e7bdf26aed6afb1d69eb3dbbdad136c66a51a2a3a325984" Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.339446 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f2ee45e3f91ddde95e7bdf26aed6afb1d69eb3dbbdad136c66a51a2a3a325984"} err="failed to get container status \"f2ee45e3f91ddde95e7bdf26aed6afb1d69eb3dbbdad136c66a51a2a3a325984\": rpc error: code = NotFound desc = could not find container \"f2ee45e3f91ddde95e7bdf26aed6afb1d69eb3dbbdad136c66a51a2a3a325984\": container with ID starting with f2ee45e3f91ddde95e7bdf26aed6afb1d69eb3dbbdad136c66a51a2a3a325984 not found: ID does not exist" Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.339518 4861 scope.go:117] "RemoveContainer" containerID="3dadd381cc9fb8f216611723c7f3113272fdd37e424ab087ae2b516b1282c724" Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.339910 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3dadd381cc9fb8f216611723c7f3113272fdd37e424ab087ae2b516b1282c724"} err="failed to get container status \"3dadd381cc9fb8f216611723c7f3113272fdd37e424ab087ae2b516b1282c724\": rpc error: code = NotFound desc = could not find container \"3dadd381cc9fb8f216611723c7f3113272fdd37e424ab087ae2b516b1282c724\": container with ID starting with 3dadd381cc9fb8f216611723c7f3113272fdd37e424ab087ae2b516b1282c724 not found: ID does not exist" Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.339931 4861 scope.go:117] "RemoveContainer" containerID="5c11d2168c2a8a146f93a9048c50a0a7da936f36039a924e2e0c946f571ac6d2" Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.340240 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5c11d2168c2a8a146f93a9048c50a0a7da936f36039a924e2e0c946f571ac6d2"} err="failed to get container status \"5c11d2168c2a8a146f93a9048c50a0a7da936f36039a924e2e0c946f571ac6d2\": rpc error: code = NotFound desc = could not find container \"5c11d2168c2a8a146f93a9048c50a0a7da936f36039a924e2e0c946f571ac6d2\": container with ID starting with 5c11d2168c2a8a146f93a9048c50a0a7da936f36039a924e2e0c946f571ac6d2 not found: ID does not exist" Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.340263 4861 scope.go:117] "RemoveContainer" containerID="1d7403a686a403bd13b8c7040a8d54e47ea882e532dbde51ff960cf2b4a7dc84" Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.340470 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1d7403a686a403bd13b8c7040a8d54e47ea882e532dbde51ff960cf2b4a7dc84"} err="failed to get container status \"1d7403a686a403bd13b8c7040a8d54e47ea882e532dbde51ff960cf2b4a7dc84\": rpc error: code = NotFound desc = could not find container \"1d7403a686a403bd13b8c7040a8d54e47ea882e532dbde51ff960cf2b4a7dc84\": container with ID starting with 1d7403a686a403bd13b8c7040a8d54e47ea882e532dbde51ff960cf2b4a7dc84 not found: ID does not exist" Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.340500 4861 scope.go:117] "RemoveContainer" containerID="a6dd78f8d0e99d19d90df2672bd0a66e48195ab147e3821b110c5b9b13fff935" Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.340747 4861 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"a6dd78f8d0e99d19d90df2672bd0a66e48195ab147e3821b110c5b9b13fff935"} err="failed to get container status \"a6dd78f8d0e99d19d90df2672bd0a66e48195ab147e3821b110c5b9b13fff935\": rpc error: code = NotFound desc = could not find container \"a6dd78f8d0e99d19d90df2672bd0a66e48195ab147e3821b110c5b9b13fff935\": container with ID starting with a6dd78f8d0e99d19d90df2672bd0a66e48195ab147e3821b110c5b9b13fff935 not found: ID does not exist" Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.340767 4861 scope.go:117] "RemoveContainer" containerID="17d9e0e142062e93034c5f825e1229664112d38443d5843713cac6e077737c48" Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.341085 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"17d9e0e142062e93034c5f825e1229664112d38443d5843713cac6e077737c48"} err="failed to get container status \"17d9e0e142062e93034c5f825e1229664112d38443d5843713cac6e077737c48\": rpc error: code = NotFound desc = could not find container \"17d9e0e142062e93034c5f825e1229664112d38443d5843713cac6e077737c48\": container with ID starting with 17d9e0e142062e93034c5f825e1229664112d38443d5843713cac6e077737c48 not found: ID does not exist" Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.341109 4861 scope.go:117] "RemoveContainer" containerID="a49ed8048fd561e10cc87dfa9b39d3ff2123f2cc65f9b4402bba6bf01d161213" Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.341430 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a49ed8048fd561e10cc87dfa9b39d3ff2123f2cc65f9b4402bba6bf01d161213"} err="failed to get container status \"a49ed8048fd561e10cc87dfa9b39d3ff2123f2cc65f9b4402bba6bf01d161213\": rpc error: code = NotFound desc = could not find container \"a49ed8048fd561e10cc87dfa9b39d3ff2123f2cc65f9b4402bba6bf01d161213\": container with ID starting with a49ed8048fd561e10cc87dfa9b39d3ff2123f2cc65f9b4402bba6bf01d161213 not found: ID does not exist" Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.341450 4861 scope.go:117] "RemoveContainer" containerID="85920cc6705b475966ceb17c3776353ec6cd31730aab339b4e5469034c49264e" Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.341755 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"85920cc6705b475966ceb17c3776353ec6cd31730aab339b4e5469034c49264e"} err="failed to get container status \"85920cc6705b475966ceb17c3776353ec6cd31730aab339b4e5469034c49264e\": rpc error: code = NotFound desc = could not find container \"85920cc6705b475966ceb17c3776353ec6cd31730aab339b4e5469034c49264e\": container with ID starting with 85920cc6705b475966ceb17c3776353ec6cd31730aab339b4e5469034c49264e not found: ID does not exist" Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.341794 4861 scope.go:117] "RemoveContainer" containerID="c4d5354382b50370a550e49f6c3deafd2a393871bb8b60c462fecdc244249ea0" Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.342066 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c4d5354382b50370a550e49f6c3deafd2a393871bb8b60c462fecdc244249ea0"} err="failed to get container status \"c4d5354382b50370a550e49f6c3deafd2a393871bb8b60c462fecdc244249ea0\": rpc error: code = NotFound desc = could not find container \"c4d5354382b50370a550e49f6c3deafd2a393871bb8b60c462fecdc244249ea0\": container with ID starting with c4d5354382b50370a550e49f6c3deafd2a393871bb8b60c462fecdc244249ea0 not found: ID does not exist" Oct 
03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.342098 4861 scope.go:117] "RemoveContainer" containerID="efb31a65c06544254430413ae43161716fc307b8a95a6f42b0e5a085136f832a" Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.342455 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"efb31a65c06544254430413ae43161716fc307b8a95a6f42b0e5a085136f832a"} err="failed to get container status \"efb31a65c06544254430413ae43161716fc307b8a95a6f42b0e5a085136f832a\": rpc error: code = NotFound desc = could not find container \"efb31a65c06544254430413ae43161716fc307b8a95a6f42b0e5a085136f832a\": container with ID starting with efb31a65c06544254430413ae43161716fc307b8a95a6f42b0e5a085136f832a not found: ID does not exist" Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.342497 4861 scope.go:117] "RemoveContainer" containerID="f2ee45e3f91ddde95e7bdf26aed6afb1d69eb3dbbdad136c66a51a2a3a325984" Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.342751 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f2ee45e3f91ddde95e7bdf26aed6afb1d69eb3dbbdad136c66a51a2a3a325984"} err="failed to get container status \"f2ee45e3f91ddde95e7bdf26aed6afb1d69eb3dbbdad136c66a51a2a3a325984\": rpc error: code = NotFound desc = could not find container \"f2ee45e3f91ddde95e7bdf26aed6afb1d69eb3dbbdad136c66a51a2a3a325984\": container with ID starting with f2ee45e3f91ddde95e7bdf26aed6afb1d69eb3dbbdad136c66a51a2a3a325984 not found: ID does not exist" Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.342795 4861 scope.go:117] "RemoveContainer" containerID="3dadd381cc9fb8f216611723c7f3113272fdd37e424ab087ae2b516b1282c724" Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.343086 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3dadd381cc9fb8f216611723c7f3113272fdd37e424ab087ae2b516b1282c724"} err="failed to get container status \"3dadd381cc9fb8f216611723c7f3113272fdd37e424ab087ae2b516b1282c724\": rpc error: code = NotFound desc = could not find container \"3dadd381cc9fb8f216611723c7f3113272fdd37e424ab087ae2b516b1282c724\": container with ID starting with 3dadd381cc9fb8f216611723c7f3113272fdd37e424ab087ae2b516b1282c724 not found: ID does not exist" Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.343113 4861 scope.go:117] "RemoveContainer" containerID="5c11d2168c2a8a146f93a9048c50a0a7da936f36039a924e2e0c946f571ac6d2" Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.344051 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5c11d2168c2a8a146f93a9048c50a0a7da936f36039a924e2e0c946f571ac6d2"} err="failed to get container status \"5c11d2168c2a8a146f93a9048c50a0a7da936f36039a924e2e0c946f571ac6d2\": rpc error: code = NotFound desc = could not find container \"5c11d2168c2a8a146f93a9048c50a0a7da936f36039a924e2e0c946f571ac6d2\": container with ID starting with 5c11d2168c2a8a146f93a9048c50a0a7da936f36039a924e2e0c946f571ac6d2 not found: ID does not exist" Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.344085 4861 scope.go:117] "RemoveContainer" containerID="1d7403a686a403bd13b8c7040a8d54e47ea882e532dbde51ff960cf2b4a7dc84" Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.344419 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1d7403a686a403bd13b8c7040a8d54e47ea882e532dbde51ff960cf2b4a7dc84"} err="failed to get container status 
\"1d7403a686a403bd13b8c7040a8d54e47ea882e532dbde51ff960cf2b4a7dc84\": rpc error: code = NotFound desc = could not find container \"1d7403a686a403bd13b8c7040a8d54e47ea882e532dbde51ff960cf2b4a7dc84\": container with ID starting with 1d7403a686a403bd13b8c7040a8d54e47ea882e532dbde51ff960cf2b4a7dc84 not found: ID does not exist" Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.344442 4861 scope.go:117] "RemoveContainer" containerID="a6dd78f8d0e99d19d90df2672bd0a66e48195ab147e3821b110c5b9b13fff935" Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.345027 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a6dd78f8d0e99d19d90df2672bd0a66e48195ab147e3821b110c5b9b13fff935"} err="failed to get container status \"a6dd78f8d0e99d19d90df2672bd0a66e48195ab147e3821b110c5b9b13fff935\": rpc error: code = NotFound desc = could not find container \"a6dd78f8d0e99d19d90df2672bd0a66e48195ab147e3821b110c5b9b13fff935\": container with ID starting with a6dd78f8d0e99d19d90df2672bd0a66e48195ab147e3821b110c5b9b13fff935 not found: ID does not exist" Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.345051 4861 scope.go:117] "RemoveContainer" containerID="17d9e0e142062e93034c5f825e1229664112d38443d5843713cac6e077737c48" Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.345338 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"17d9e0e142062e93034c5f825e1229664112d38443d5843713cac6e077737c48"} err="failed to get container status \"17d9e0e142062e93034c5f825e1229664112d38443d5843713cac6e077737c48\": rpc error: code = NotFound desc = could not find container \"17d9e0e142062e93034c5f825e1229664112d38443d5843713cac6e077737c48\": container with ID starting with 17d9e0e142062e93034c5f825e1229664112d38443d5843713cac6e077737c48 not found: ID does not exist" Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.345363 4861 scope.go:117] "RemoveContainer" containerID="a49ed8048fd561e10cc87dfa9b39d3ff2123f2cc65f9b4402bba6bf01d161213" Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.345680 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a49ed8048fd561e10cc87dfa9b39d3ff2123f2cc65f9b4402bba6bf01d161213"} err="failed to get container status \"a49ed8048fd561e10cc87dfa9b39d3ff2123f2cc65f9b4402bba6bf01d161213\": rpc error: code = NotFound desc = could not find container \"a49ed8048fd561e10cc87dfa9b39d3ff2123f2cc65f9b4402bba6bf01d161213\": container with ID starting with a49ed8048fd561e10cc87dfa9b39d3ff2123f2cc65f9b4402bba6bf01d161213 not found: ID does not exist" Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.345705 4861 scope.go:117] "RemoveContainer" containerID="85920cc6705b475966ceb17c3776353ec6cd31730aab339b4e5469034c49264e" Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.346045 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"85920cc6705b475966ceb17c3776353ec6cd31730aab339b4e5469034c49264e"} err="failed to get container status \"85920cc6705b475966ceb17c3776353ec6cd31730aab339b4e5469034c49264e\": rpc error: code = NotFound desc = could not find container \"85920cc6705b475966ceb17c3776353ec6cd31730aab339b4e5469034c49264e\": container with ID starting with 85920cc6705b475966ceb17c3776353ec6cd31730aab339b4e5469034c49264e not found: ID does not exist" Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.346076 4861 scope.go:117] "RemoveContainer" 
containerID="c4d5354382b50370a550e49f6c3deafd2a393871bb8b60c462fecdc244249ea0" Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.346710 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c4d5354382b50370a550e49f6c3deafd2a393871bb8b60c462fecdc244249ea0"} err="failed to get container status \"c4d5354382b50370a550e49f6c3deafd2a393871bb8b60c462fecdc244249ea0\": rpc error: code = NotFound desc = could not find container \"c4d5354382b50370a550e49f6c3deafd2a393871bb8b60c462fecdc244249ea0\": container with ID starting with c4d5354382b50370a550e49f6c3deafd2a393871bb8b60c462fecdc244249ea0 not found: ID does not exist" Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.346742 4861 scope.go:117] "RemoveContainer" containerID="efb31a65c06544254430413ae43161716fc307b8a95a6f42b0e5a085136f832a" Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.347040 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"efb31a65c06544254430413ae43161716fc307b8a95a6f42b0e5a085136f832a"} err="failed to get container status \"efb31a65c06544254430413ae43161716fc307b8a95a6f42b0e5a085136f832a\": rpc error: code = NotFound desc = could not find container \"efb31a65c06544254430413ae43161716fc307b8a95a6f42b0e5a085136f832a\": container with ID starting with efb31a65c06544254430413ae43161716fc307b8a95a6f42b0e5a085136f832a not found: ID does not exist" Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.347067 4861 scope.go:117] "RemoveContainer" containerID="f2ee45e3f91ddde95e7bdf26aed6afb1d69eb3dbbdad136c66a51a2a3a325984" Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.347332 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f2ee45e3f91ddde95e7bdf26aed6afb1d69eb3dbbdad136c66a51a2a3a325984"} err="failed to get container status \"f2ee45e3f91ddde95e7bdf26aed6afb1d69eb3dbbdad136c66a51a2a3a325984\": rpc error: code = NotFound desc = could not find container \"f2ee45e3f91ddde95e7bdf26aed6afb1d69eb3dbbdad136c66a51a2a3a325984\": container with ID starting with f2ee45e3f91ddde95e7bdf26aed6afb1d69eb3dbbdad136c66a51a2a3a325984 not found: ID does not exist" Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.347353 4861 scope.go:117] "RemoveContainer" containerID="3dadd381cc9fb8f216611723c7f3113272fdd37e424ab087ae2b516b1282c724" Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.348633 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3dadd381cc9fb8f216611723c7f3113272fdd37e424ab087ae2b516b1282c724"} err="failed to get container status \"3dadd381cc9fb8f216611723c7f3113272fdd37e424ab087ae2b516b1282c724\": rpc error: code = NotFound desc = could not find container \"3dadd381cc9fb8f216611723c7f3113272fdd37e424ab087ae2b516b1282c724\": container with ID starting with 3dadd381cc9fb8f216611723c7f3113272fdd37e424ab087ae2b516b1282c724 not found: ID does not exist" Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.348688 4861 scope.go:117] "RemoveContainer" containerID="5c11d2168c2a8a146f93a9048c50a0a7da936f36039a924e2e0c946f571ac6d2" Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.349016 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5c11d2168c2a8a146f93a9048c50a0a7da936f36039a924e2e0c946f571ac6d2"} err="failed to get container status \"5c11d2168c2a8a146f93a9048c50a0a7da936f36039a924e2e0c946f571ac6d2\": rpc error: code = NotFound desc = could not find 
container \"5c11d2168c2a8a146f93a9048c50a0a7da936f36039a924e2e0c946f571ac6d2\": container with ID starting with 5c11d2168c2a8a146f93a9048c50a0a7da936f36039a924e2e0c946f571ac6d2 not found: ID does not exist" Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.349048 4861 scope.go:117] "RemoveContainer" containerID="1d7403a686a403bd13b8c7040a8d54e47ea882e532dbde51ff960cf2b4a7dc84" Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.349353 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1d7403a686a403bd13b8c7040a8d54e47ea882e532dbde51ff960cf2b4a7dc84"} err="failed to get container status \"1d7403a686a403bd13b8c7040a8d54e47ea882e532dbde51ff960cf2b4a7dc84\": rpc error: code = NotFound desc = could not find container \"1d7403a686a403bd13b8c7040a8d54e47ea882e532dbde51ff960cf2b4a7dc84\": container with ID starting with 1d7403a686a403bd13b8c7040a8d54e47ea882e532dbde51ff960cf2b4a7dc84 not found: ID does not exist" Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.349376 4861 scope.go:117] "RemoveContainer" containerID="a6dd78f8d0e99d19d90df2672bd0a66e48195ab147e3821b110c5b9b13fff935" Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.349668 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a6dd78f8d0e99d19d90df2672bd0a66e48195ab147e3821b110c5b9b13fff935"} err="failed to get container status \"a6dd78f8d0e99d19d90df2672bd0a66e48195ab147e3821b110c5b9b13fff935\": rpc error: code = NotFound desc = could not find container \"a6dd78f8d0e99d19d90df2672bd0a66e48195ab147e3821b110c5b9b13fff935\": container with ID starting with a6dd78f8d0e99d19d90df2672bd0a66e48195ab147e3821b110c5b9b13fff935 not found: ID does not exist" Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.349692 4861 scope.go:117] "RemoveContainer" containerID="17d9e0e142062e93034c5f825e1229664112d38443d5843713cac6e077737c48" Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.349925 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"17d9e0e142062e93034c5f825e1229664112d38443d5843713cac6e077737c48"} err="failed to get container status \"17d9e0e142062e93034c5f825e1229664112d38443d5843713cac6e077737c48\": rpc error: code = NotFound desc = could not find container \"17d9e0e142062e93034c5f825e1229664112d38443d5843713cac6e077737c48\": container with ID starting with 17d9e0e142062e93034c5f825e1229664112d38443d5843713cac6e077737c48 not found: ID does not exist" Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.349950 4861 scope.go:117] "RemoveContainer" containerID="a49ed8048fd561e10cc87dfa9b39d3ff2123f2cc65f9b4402bba6bf01d161213" Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.350253 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a49ed8048fd561e10cc87dfa9b39d3ff2123f2cc65f9b4402bba6bf01d161213"} err="failed to get container status \"a49ed8048fd561e10cc87dfa9b39d3ff2123f2cc65f9b4402bba6bf01d161213\": rpc error: code = NotFound desc = could not find container \"a49ed8048fd561e10cc87dfa9b39d3ff2123f2cc65f9b4402bba6bf01d161213\": container with ID starting with a49ed8048fd561e10cc87dfa9b39d3ff2123f2cc65f9b4402bba6bf01d161213 not found: ID does not exist" Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.350278 4861 scope.go:117] "RemoveContainer" containerID="85920cc6705b475966ceb17c3776353ec6cd31730aab339b4e5469034c49264e" Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.350516 4861 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"85920cc6705b475966ceb17c3776353ec6cd31730aab339b4e5469034c49264e"} err="failed to get container status \"85920cc6705b475966ceb17c3776353ec6cd31730aab339b4e5469034c49264e\": rpc error: code = NotFound desc = could not find container \"85920cc6705b475966ceb17c3776353ec6cd31730aab339b4e5469034c49264e\": container with ID starting with 85920cc6705b475966ceb17c3776353ec6cd31730aab339b4e5469034c49264e not found: ID does not exist" Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.350540 4861 scope.go:117] "RemoveContainer" containerID="c4d5354382b50370a550e49f6c3deafd2a393871bb8b60c462fecdc244249ea0" Oct 03 13:42:53 crc kubenswrapper[4861]: I1003 13:42:53.350727 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c4d5354382b50370a550e49f6c3deafd2a393871bb8b60c462fecdc244249ea0"} err="failed to get container status \"c4d5354382b50370a550e49f6c3deafd2a393871bb8b60c462fecdc244249ea0\": rpc error: code = NotFound desc = could not find container \"c4d5354382b50370a550e49f6c3deafd2a393871bb8b60c462fecdc244249ea0\": container with ID starting with c4d5354382b50370a550e49f6c3deafd2a393871bb8b60c462fecdc244249ea0 not found: ID does not exist" Oct 03 13:42:54 crc kubenswrapper[4861]: I1003 13:42:54.074703 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-jwgvx_f714b7db-082f-4c2c-8239-ba5df6986c13/kube-multus/1.log" Oct 03 13:42:54 crc kubenswrapper[4861]: I1003 13:42:54.075022 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-jwgvx" event={"ID":"f714b7db-082f-4c2c-8239-ba5df6986c13","Type":"ContainerStarted","Data":"0544a2e98b8eef2ff3db6a3f9cd43a5b9f8eabec429bdb9d5530fdf79edfb441"} Oct 03 13:42:54 crc kubenswrapper[4861]: I1003 13:42:54.080560 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ts4sc" event={"ID":"20a66b6e-e251-4052-a494-233b917d0883","Type":"ContainerStarted","Data":"5b689541620cad749152072209a37451bf0c72a965e3cb866c657d261422da99"} Oct 03 13:42:54 crc kubenswrapper[4861]: I1003 13:42:54.080733 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ts4sc" event={"ID":"20a66b6e-e251-4052-a494-233b917d0883","Type":"ContainerStarted","Data":"5f32fea8622c9e29ef08bb791b16038ce70b451d02b30e9703a76b23ae4e95ad"} Oct 03 13:42:54 crc kubenswrapper[4861]: I1003 13:42:54.080816 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ts4sc" event={"ID":"20a66b6e-e251-4052-a494-233b917d0883","Type":"ContainerStarted","Data":"0be9cb474b8f72eb0737945e29ca497b267ffc0b80906ebacd8e33de16b864f8"} Oct 03 13:42:54 crc kubenswrapper[4861]: I1003 13:42:54.080889 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ts4sc" event={"ID":"20a66b6e-e251-4052-a494-233b917d0883","Type":"ContainerStarted","Data":"8e726bdd909ee51739366b2032a8aae545d99280fec8aa69ddf5154318d30a8e"} Oct 03 13:42:54 crc kubenswrapper[4861]: I1003 13:42:54.080967 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ts4sc" event={"ID":"20a66b6e-e251-4052-a494-233b917d0883","Type":"ContainerStarted","Data":"e66b0dfe1422bb93e6a74ef81d7c7cacb5a98d50b83371969ac47fc833c6780e"} Oct 03 13:42:54 crc kubenswrapper[4861]: I1003 13:42:54.081051 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-ovn-kubernetes/ovnkube-node-ts4sc" event={"ID":"20a66b6e-e251-4052-a494-233b917d0883","Type":"ContainerStarted","Data":"ea9de0fa1e4228c28e4adf1e96a6630ba0f2c5d2fb1eedac65b01ee87b9fd707"} Oct 03 13:42:54 crc kubenswrapper[4861]: I1003 13:42:54.688591 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="66ed4999-426b-4615-bfb3-764a3ecc950f" path="/var/lib/kubelet/pods/66ed4999-426b-4615-bfb3-764a3ecc950f/volumes" Oct 03 13:42:56 crc kubenswrapper[4861]: I1003 13:42:56.097692 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ts4sc" event={"ID":"20a66b6e-e251-4052-a494-233b917d0883","Type":"ContainerStarted","Data":"298243ace79453f9deda8edfdbd5b911a5a5aa1a79747e7108f0b4ae681467ba"} Oct 03 13:42:59 crc kubenswrapper[4861]: I1003 13:42:59.138273 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ts4sc" event={"ID":"20a66b6e-e251-4052-a494-233b917d0883","Type":"ContainerStarted","Data":"8e40b27eea611a4d7acd890b5277ab19caf67ecb73080379c24932d0718662a1"} Oct 03 13:42:59 crc kubenswrapper[4861]: I1003 13:42:59.138894 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-ts4sc" Oct 03 13:42:59 crc kubenswrapper[4861]: I1003 13:42:59.138986 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-ts4sc" Oct 03 13:42:59 crc kubenswrapper[4861]: I1003 13:42:59.139092 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-ts4sc" Oct 03 13:42:59 crc kubenswrapper[4861]: I1003 13:42:59.168918 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-ts4sc" Oct 03 13:42:59 crc kubenswrapper[4861]: I1003 13:42:59.177654 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-ts4sc" podStartSLOduration=7.177633604 podStartE2EDuration="7.177633604s" podCreationTimestamp="2025-10-03 13:42:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:42:59.168647628 +0000 UTC m=+693.166632695" watchObservedRunningTime="2025-10-03 13:42:59.177633604 +0000 UTC m=+693.175618651" Oct 03 13:42:59 crc kubenswrapper[4861]: I1003 13:42:59.182353 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-ts4sc" Oct 03 13:43:00 crc kubenswrapper[4861]: I1003 13:43:00.144558 4861 patch_prober.go:28] interesting pod/machine-config-daemon-t9slw container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 13:43:00 crc kubenswrapper[4861]: I1003 13:43:00.144896 4861 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 13:43:05 crc kubenswrapper[4861]: I1003 13:43:05.920393 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-654v6"] Oct 03 13:43:05 crc 
kubenswrapper[4861]: I1003 13:43:05.921968 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-654v6" podUID="a6a84618-0674-4410-ab88-6896066b6077" containerName="controller-manager" containerID="cri-o://ac191b98f4c14b5034fef72421bc5fbc7b35546e97318f247c42146d9b1a773f" gracePeriod=30 Oct 03 13:43:06 crc kubenswrapper[4861]: I1003 13:43:06.042359 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-gjvs9"] Oct 03 13:43:06 crc kubenswrapper[4861]: I1003 13:43:06.042628 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gjvs9" podUID="6f825716-7012-4f64-925d-fdf69a3b8b28" containerName="route-controller-manager" containerID="cri-o://dfecd76ed3d85900808a394017d33bcf12cbd9a3750dbb13a8e7f7df0fa75dfd" gracePeriod=30 Oct 03 13:43:06 crc kubenswrapper[4861]: I1003 13:43:06.948962 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-654v6" Oct 03 13:43:07 crc kubenswrapper[4861]: I1003 13:43:07.014345 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gjvs9" Oct 03 13:43:07 crc kubenswrapper[4861]: I1003 13:43:07.033281 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n8cpv\" (UniqueName: \"kubernetes.io/projected/a6a84618-0674-4410-ab88-6896066b6077-kube-api-access-n8cpv\") pod \"a6a84618-0674-4410-ab88-6896066b6077\" (UID: \"a6a84618-0674-4410-ab88-6896066b6077\") " Oct 03 13:43:07 crc kubenswrapper[4861]: I1003 13:43:07.033331 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a6a84618-0674-4410-ab88-6896066b6077-serving-cert\") pod \"a6a84618-0674-4410-ab88-6896066b6077\" (UID: \"a6a84618-0674-4410-ab88-6896066b6077\") " Oct 03 13:43:07 crc kubenswrapper[4861]: I1003 13:43:07.033387 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a6a84618-0674-4410-ab88-6896066b6077-client-ca\") pod \"a6a84618-0674-4410-ab88-6896066b6077\" (UID: \"a6a84618-0674-4410-ab88-6896066b6077\") " Oct 03 13:43:07 crc kubenswrapper[4861]: I1003 13:43:07.033407 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a6a84618-0674-4410-ab88-6896066b6077-config\") pod \"a6a84618-0674-4410-ab88-6896066b6077\" (UID: \"a6a84618-0674-4410-ab88-6896066b6077\") " Oct 03 13:43:07 crc kubenswrapper[4861]: I1003 13:43:07.033441 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/a6a84618-0674-4410-ab88-6896066b6077-proxy-ca-bundles\") pod \"a6a84618-0674-4410-ab88-6896066b6077\" (UID: \"a6a84618-0674-4410-ab88-6896066b6077\") " Oct 03 13:43:07 crc kubenswrapper[4861]: I1003 13:43:07.034514 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a6a84618-0674-4410-ab88-6896066b6077-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "a6a84618-0674-4410-ab88-6896066b6077" (UID: "a6a84618-0674-4410-ab88-6896066b6077"). 
InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:43:07 crc kubenswrapper[4861]: I1003 13:43:07.034973 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a6a84618-0674-4410-ab88-6896066b6077-config" (OuterVolumeSpecName: "config") pod "a6a84618-0674-4410-ab88-6896066b6077" (UID: "a6a84618-0674-4410-ab88-6896066b6077"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:43:07 crc kubenswrapper[4861]: I1003 13:43:07.035537 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a6a84618-0674-4410-ab88-6896066b6077-client-ca" (OuterVolumeSpecName: "client-ca") pod "a6a84618-0674-4410-ab88-6896066b6077" (UID: "a6a84618-0674-4410-ab88-6896066b6077"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:43:07 crc kubenswrapper[4861]: I1003 13:43:07.053732 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a6a84618-0674-4410-ab88-6896066b6077-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "a6a84618-0674-4410-ab88-6896066b6077" (UID: "a6a84618-0674-4410-ab88-6896066b6077"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:43:07 crc kubenswrapper[4861]: I1003 13:43:07.055400 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a6a84618-0674-4410-ab88-6896066b6077-kube-api-access-n8cpv" (OuterVolumeSpecName: "kube-api-access-n8cpv") pod "a6a84618-0674-4410-ab88-6896066b6077" (UID: "a6a84618-0674-4410-ab88-6896066b6077"). InnerVolumeSpecName "kube-api-access-n8cpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:43:07 crc kubenswrapper[4861]: I1003 13:43:07.134170 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qkq6j\" (UniqueName: \"kubernetes.io/projected/6f825716-7012-4f64-925d-fdf69a3b8b28-kube-api-access-qkq6j\") pod \"6f825716-7012-4f64-925d-fdf69a3b8b28\" (UID: \"6f825716-7012-4f64-925d-fdf69a3b8b28\") " Oct 03 13:43:07 crc kubenswrapper[4861]: I1003 13:43:07.134267 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6f825716-7012-4f64-925d-fdf69a3b8b28-config\") pod \"6f825716-7012-4f64-925d-fdf69a3b8b28\" (UID: \"6f825716-7012-4f64-925d-fdf69a3b8b28\") " Oct 03 13:43:07 crc kubenswrapper[4861]: I1003 13:43:07.134376 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6f825716-7012-4f64-925d-fdf69a3b8b28-client-ca\") pod \"6f825716-7012-4f64-925d-fdf69a3b8b28\" (UID: \"6f825716-7012-4f64-925d-fdf69a3b8b28\") " Oct 03 13:43:07 crc kubenswrapper[4861]: I1003 13:43:07.134407 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6f825716-7012-4f64-925d-fdf69a3b8b28-serving-cert\") pod \"6f825716-7012-4f64-925d-fdf69a3b8b28\" (UID: \"6f825716-7012-4f64-925d-fdf69a3b8b28\") " Oct 03 13:43:07 crc kubenswrapper[4861]: I1003 13:43:07.134658 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n8cpv\" (UniqueName: \"kubernetes.io/projected/a6a84618-0674-4410-ab88-6896066b6077-kube-api-access-n8cpv\") on node \"crc\" DevicePath \"\"" Oct 03 13:43:07 crc kubenswrapper[4861]: 
I1003 13:43:07.134675 4861 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a6a84618-0674-4410-ab88-6896066b6077-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 03 13:43:07 crc kubenswrapper[4861]: I1003 13:43:07.134686 4861 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a6a84618-0674-4410-ab88-6896066b6077-client-ca\") on node \"crc\" DevicePath \"\"" Oct 03 13:43:07 crc kubenswrapper[4861]: I1003 13:43:07.134695 4861 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a6a84618-0674-4410-ab88-6896066b6077-config\") on node \"crc\" DevicePath \"\"" Oct 03 13:43:07 crc kubenswrapper[4861]: I1003 13:43:07.134704 4861 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/a6a84618-0674-4410-ab88-6896066b6077-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Oct 03 13:43:07 crc kubenswrapper[4861]: I1003 13:43:07.135101 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6f825716-7012-4f64-925d-fdf69a3b8b28-client-ca" (OuterVolumeSpecName: "client-ca") pod "6f825716-7012-4f64-925d-fdf69a3b8b28" (UID: "6f825716-7012-4f64-925d-fdf69a3b8b28"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:43:07 crc kubenswrapper[4861]: I1003 13:43:07.135771 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6f825716-7012-4f64-925d-fdf69a3b8b28-config" (OuterVolumeSpecName: "config") pod "6f825716-7012-4f64-925d-fdf69a3b8b28" (UID: "6f825716-7012-4f64-925d-fdf69a3b8b28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:43:07 crc kubenswrapper[4861]: I1003 13:43:07.137763 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6f825716-7012-4f64-925d-fdf69a3b8b28-kube-api-access-qkq6j" (OuterVolumeSpecName: "kube-api-access-qkq6j") pod "6f825716-7012-4f64-925d-fdf69a3b8b28" (UID: "6f825716-7012-4f64-925d-fdf69a3b8b28"). InnerVolumeSpecName "kube-api-access-qkq6j". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:43:07 crc kubenswrapper[4861]: I1003 13:43:07.138382 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6f825716-7012-4f64-925d-fdf69a3b8b28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6f825716-7012-4f64-925d-fdf69a3b8b28" (UID: "6f825716-7012-4f64-925d-fdf69a3b8b28"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:43:07 crc kubenswrapper[4861]: I1003 13:43:07.178829 4861 generic.go:334] "Generic (PLEG): container finished" podID="a6a84618-0674-4410-ab88-6896066b6077" containerID="ac191b98f4c14b5034fef72421bc5fbc7b35546e97318f247c42146d9b1a773f" exitCode=0 Oct 03 13:43:07 crc kubenswrapper[4861]: I1003 13:43:07.178927 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-654v6" event={"ID":"a6a84618-0674-4410-ab88-6896066b6077","Type":"ContainerDied","Data":"ac191b98f4c14b5034fef72421bc5fbc7b35546e97318f247c42146d9b1a773f"} Oct 03 13:43:07 crc kubenswrapper[4861]: I1003 13:43:07.178963 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-654v6" event={"ID":"a6a84618-0674-4410-ab88-6896066b6077","Type":"ContainerDied","Data":"a154c518d8f13f980c687014a54da3cc027487c02e5bd17ed36aab394cfff116"} Oct 03 13:43:07 crc kubenswrapper[4861]: I1003 13:43:07.178965 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-654v6" Oct 03 13:43:07 crc kubenswrapper[4861]: I1003 13:43:07.178984 4861 scope.go:117] "RemoveContainer" containerID="ac191b98f4c14b5034fef72421bc5fbc7b35546e97318f247c42146d9b1a773f" Oct 03 13:43:07 crc kubenswrapper[4861]: I1003 13:43:07.184366 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gjvs9" Oct 03 13:43:07 crc kubenswrapper[4861]: I1003 13:43:07.184436 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gjvs9" event={"ID":"6f825716-7012-4f64-925d-fdf69a3b8b28","Type":"ContainerDied","Data":"dfecd76ed3d85900808a394017d33bcf12cbd9a3750dbb13a8e7f7df0fa75dfd"} Oct 03 13:43:07 crc kubenswrapper[4861]: I1003 13:43:07.184448 4861 generic.go:334] "Generic (PLEG): container finished" podID="6f825716-7012-4f64-925d-fdf69a3b8b28" containerID="dfecd76ed3d85900808a394017d33bcf12cbd9a3750dbb13a8e7f7df0fa75dfd" exitCode=0 Oct 03 13:43:07 crc kubenswrapper[4861]: I1003 13:43:07.184476 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gjvs9" event={"ID":"6f825716-7012-4f64-925d-fdf69a3b8b28","Type":"ContainerDied","Data":"8892703c17b4be57ae35707c84eab3755fc33d1e950b8d5138c6ae8a78e45626"} Oct 03 13:43:07 crc kubenswrapper[4861]: I1003 13:43:07.200713 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-68fcd589d8-tqqvk"] Oct 03 13:43:07 crc kubenswrapper[4861]: E1003 13:43:07.201077 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a6a84618-0674-4410-ab88-6896066b6077" containerName="controller-manager" Oct 03 13:43:07 crc kubenswrapper[4861]: I1003 13:43:07.201114 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="a6a84618-0674-4410-ab88-6896066b6077" containerName="controller-manager" Oct 03 13:43:07 crc kubenswrapper[4861]: E1003 13:43:07.201137 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6f825716-7012-4f64-925d-fdf69a3b8b28" containerName="route-controller-manager" Oct 03 13:43:07 crc kubenswrapper[4861]: I1003 13:43:07.201146 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="6f825716-7012-4f64-925d-fdf69a3b8b28" 
containerName="route-controller-manager" Oct 03 13:43:07 crc kubenswrapper[4861]: I1003 13:43:07.201273 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="a6a84618-0674-4410-ab88-6896066b6077" containerName="controller-manager" Oct 03 13:43:07 crc kubenswrapper[4861]: I1003 13:43:07.201287 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="6f825716-7012-4f64-925d-fdf69a3b8b28" containerName="route-controller-manager" Oct 03 13:43:07 crc kubenswrapper[4861]: I1003 13:43:07.201740 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-68fcd589d8-tqqvk" Oct 03 13:43:07 crc kubenswrapper[4861]: I1003 13:43:07.213644 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7d895bb644-7m5t8"] Oct 03 13:43:07 crc kubenswrapper[4861]: I1003 13:43:07.214647 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7d895bb644-7m5t8" Oct 03 13:43:07 crc kubenswrapper[4861]: I1003 13:43:07.216519 4861 scope.go:117] "RemoveContainer" containerID="ac191b98f4c14b5034fef72421bc5fbc7b35546e97318f247c42146d9b1a773f" Oct 03 13:43:07 crc kubenswrapper[4861]: I1003 13:43:07.216769 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Oct 03 13:43:07 crc kubenswrapper[4861]: I1003 13:43:07.217214 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Oct 03 13:43:07 crc kubenswrapper[4861]: E1003 13:43:07.217293 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ac191b98f4c14b5034fef72421bc5fbc7b35546e97318f247c42146d9b1a773f\": container with ID starting with ac191b98f4c14b5034fef72421bc5fbc7b35546e97318f247c42146d9b1a773f not found: ID does not exist" containerID="ac191b98f4c14b5034fef72421bc5fbc7b35546e97318f247c42146d9b1a773f" Oct 03 13:43:07 crc kubenswrapper[4861]: I1003 13:43:07.217343 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ac191b98f4c14b5034fef72421bc5fbc7b35546e97318f247c42146d9b1a773f"} err="failed to get container status \"ac191b98f4c14b5034fef72421bc5fbc7b35546e97318f247c42146d9b1a773f\": rpc error: code = NotFound desc = could not find container \"ac191b98f4c14b5034fef72421bc5fbc7b35546e97318f247c42146d9b1a773f\": container with ID starting with ac191b98f4c14b5034fef72421bc5fbc7b35546e97318f247c42146d9b1a773f not found: ID does not exist" Oct 03 13:43:07 crc kubenswrapper[4861]: I1003 13:43:07.217379 4861 scope.go:117] "RemoveContainer" containerID="dfecd76ed3d85900808a394017d33bcf12cbd9a3750dbb13a8e7f7df0fa75dfd" Oct 03 13:43:07 crc kubenswrapper[4861]: I1003 13:43:07.217486 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Oct 03 13:43:07 crc kubenswrapper[4861]: I1003 13:43:07.217934 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Oct 03 13:43:07 crc kubenswrapper[4861]: I1003 13:43:07.218188 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Oct 03 13:43:07 crc kubenswrapper[4861]: I1003 13:43:07.218352 4861 reflector.go:368] Caches populated for 
*v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Oct 03 13:43:07 crc kubenswrapper[4861]: I1003 13:43:07.218461 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Oct 03 13:43:07 crc kubenswrapper[4861]: I1003 13:43:07.218555 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Oct 03 13:43:07 crc kubenswrapper[4861]: I1003 13:43:07.218727 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Oct 03 13:43:07 crc kubenswrapper[4861]: I1003 13:43:07.221189 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Oct 03 13:43:07 crc kubenswrapper[4861]: I1003 13:43:07.221445 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Oct 03 13:43:07 crc kubenswrapper[4861]: I1003 13:43:07.221589 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Oct 03 13:43:07 crc kubenswrapper[4861]: I1003 13:43:07.223574 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-68fcd589d8-tqqvk"] Oct 03 13:43:07 crc kubenswrapper[4861]: I1003 13:43:07.246260 4861 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6f825716-7012-4f64-925d-fdf69a3b8b28-client-ca\") on node \"crc\" DevicePath \"\"" Oct 03 13:43:07 crc kubenswrapper[4861]: I1003 13:43:07.246291 4861 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6f825716-7012-4f64-925d-fdf69a3b8b28-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 03 13:43:07 crc kubenswrapper[4861]: I1003 13:43:07.246310 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qkq6j\" (UniqueName: \"kubernetes.io/projected/6f825716-7012-4f64-925d-fdf69a3b8b28-kube-api-access-qkq6j\") on node \"crc\" DevicePath \"\"" Oct 03 13:43:07 crc kubenswrapper[4861]: I1003 13:43:07.246323 4861 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6f825716-7012-4f64-925d-fdf69a3b8b28-config\") on node \"crc\" DevicePath \"\"" Oct 03 13:43:07 crc kubenswrapper[4861]: I1003 13:43:07.246444 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7d895bb644-7m5t8"] Oct 03 13:43:07 crc kubenswrapper[4861]: I1003 13:43:07.262551 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-654v6"] Oct 03 13:43:07 crc kubenswrapper[4861]: I1003 13:43:07.263101 4861 scope.go:117] "RemoveContainer" containerID="dfecd76ed3d85900808a394017d33bcf12cbd9a3750dbb13a8e7f7df0fa75dfd" Oct 03 13:43:07 crc kubenswrapper[4861]: E1003 13:43:07.265521 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dfecd76ed3d85900808a394017d33bcf12cbd9a3750dbb13a8e7f7df0fa75dfd\": container with ID starting with dfecd76ed3d85900808a394017d33bcf12cbd9a3750dbb13a8e7f7df0fa75dfd not found: ID does not exist" containerID="dfecd76ed3d85900808a394017d33bcf12cbd9a3750dbb13a8e7f7df0fa75dfd" Oct 03 13:43:07 crc kubenswrapper[4861]: I1003 13:43:07.265585 4861 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dfecd76ed3d85900808a394017d33bcf12cbd9a3750dbb13a8e7f7df0fa75dfd"} err="failed to get container status \"dfecd76ed3d85900808a394017d33bcf12cbd9a3750dbb13a8e7f7df0fa75dfd\": rpc error: code = NotFound desc = could not find container \"dfecd76ed3d85900808a394017d33bcf12cbd9a3750dbb13a8e7f7df0fa75dfd\": container with ID starting with dfecd76ed3d85900808a394017d33bcf12cbd9a3750dbb13a8e7f7df0fa75dfd not found: ID does not exist" Oct 03 13:43:07 crc kubenswrapper[4861]: I1003 13:43:07.269601 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Oct 03 13:43:07 crc kubenswrapper[4861]: I1003 13:43:07.274760 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-654v6"] Oct 03 13:43:07 crc kubenswrapper[4861]: I1003 13:43:07.284915 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-gjvs9"] Oct 03 13:43:07 crc kubenswrapper[4861]: I1003 13:43:07.287668 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-gjvs9"] Oct 03 13:43:07 crc kubenswrapper[4861]: I1003 13:43:07.347840 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c2adae60-fe6c-41d3-9362-18e19f76630c-client-ca\") pod \"route-controller-manager-7d895bb644-7m5t8\" (UID: \"c2adae60-fe6c-41d3-9362-18e19f76630c\") " pod="openshift-route-controller-manager/route-controller-manager-7d895bb644-7m5t8" Oct 03 13:43:07 crc kubenswrapper[4861]: I1003 13:43:07.347900 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bjx7b\" (UniqueName: \"kubernetes.io/projected/c2adae60-fe6c-41d3-9362-18e19f76630c-kube-api-access-bjx7b\") pod \"route-controller-manager-7d895bb644-7m5t8\" (UID: \"c2adae60-fe6c-41d3-9362-18e19f76630c\") " pod="openshift-route-controller-manager/route-controller-manager-7d895bb644-7m5t8" Oct 03 13:43:07 crc kubenswrapper[4861]: I1003 13:43:07.347925 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/a547e8df-59ec-47e6-95b6-3b1cc999f084-proxy-ca-bundles\") pod \"controller-manager-68fcd589d8-tqqvk\" (UID: \"a547e8df-59ec-47e6-95b6-3b1cc999f084\") " pod="openshift-controller-manager/controller-manager-68fcd589d8-tqqvk" Oct 03 13:43:07 crc kubenswrapper[4861]: I1003 13:43:07.347947 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c2adae60-fe6c-41d3-9362-18e19f76630c-config\") pod \"route-controller-manager-7d895bb644-7m5t8\" (UID: \"c2adae60-fe6c-41d3-9362-18e19f76630c\") " pod="openshift-route-controller-manager/route-controller-manager-7d895bb644-7m5t8" Oct 03 13:43:07 crc kubenswrapper[4861]: I1003 13:43:07.347972 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c2kw7\" (UniqueName: \"kubernetes.io/projected/a547e8df-59ec-47e6-95b6-3b1cc999f084-kube-api-access-c2kw7\") pod \"controller-manager-68fcd589d8-tqqvk\" (UID: \"a547e8df-59ec-47e6-95b6-3b1cc999f084\") " 
pod="openshift-controller-manager/controller-manager-68fcd589d8-tqqvk" Oct 03 13:43:07 crc kubenswrapper[4861]: I1003 13:43:07.347987 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a547e8df-59ec-47e6-95b6-3b1cc999f084-client-ca\") pod \"controller-manager-68fcd589d8-tqqvk\" (UID: \"a547e8df-59ec-47e6-95b6-3b1cc999f084\") " pod="openshift-controller-manager/controller-manager-68fcd589d8-tqqvk" Oct 03 13:43:07 crc kubenswrapper[4861]: I1003 13:43:07.348156 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c2adae60-fe6c-41d3-9362-18e19f76630c-serving-cert\") pod \"route-controller-manager-7d895bb644-7m5t8\" (UID: \"c2adae60-fe6c-41d3-9362-18e19f76630c\") " pod="openshift-route-controller-manager/route-controller-manager-7d895bb644-7m5t8" Oct 03 13:43:07 crc kubenswrapper[4861]: I1003 13:43:07.348302 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a547e8df-59ec-47e6-95b6-3b1cc999f084-config\") pod \"controller-manager-68fcd589d8-tqqvk\" (UID: \"a547e8df-59ec-47e6-95b6-3b1cc999f084\") " pod="openshift-controller-manager/controller-manager-68fcd589d8-tqqvk" Oct 03 13:43:07 crc kubenswrapper[4861]: I1003 13:43:07.348410 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a547e8df-59ec-47e6-95b6-3b1cc999f084-serving-cert\") pod \"controller-manager-68fcd589d8-tqqvk\" (UID: \"a547e8df-59ec-47e6-95b6-3b1cc999f084\") " pod="openshift-controller-manager/controller-manager-68fcd589d8-tqqvk" Oct 03 13:43:07 crc kubenswrapper[4861]: I1003 13:43:07.450292 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c2adae60-fe6c-41d3-9362-18e19f76630c-serving-cert\") pod \"route-controller-manager-7d895bb644-7m5t8\" (UID: \"c2adae60-fe6c-41d3-9362-18e19f76630c\") " pod="openshift-route-controller-manager/route-controller-manager-7d895bb644-7m5t8" Oct 03 13:43:07 crc kubenswrapper[4861]: I1003 13:43:07.450356 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a547e8df-59ec-47e6-95b6-3b1cc999f084-config\") pod \"controller-manager-68fcd589d8-tqqvk\" (UID: \"a547e8df-59ec-47e6-95b6-3b1cc999f084\") " pod="openshift-controller-manager/controller-manager-68fcd589d8-tqqvk" Oct 03 13:43:07 crc kubenswrapper[4861]: I1003 13:43:07.450403 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a547e8df-59ec-47e6-95b6-3b1cc999f084-serving-cert\") pod \"controller-manager-68fcd589d8-tqqvk\" (UID: \"a547e8df-59ec-47e6-95b6-3b1cc999f084\") " pod="openshift-controller-manager/controller-manager-68fcd589d8-tqqvk" Oct 03 13:43:07 crc kubenswrapper[4861]: I1003 13:43:07.450438 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c2adae60-fe6c-41d3-9362-18e19f76630c-client-ca\") pod \"route-controller-manager-7d895bb644-7m5t8\" (UID: \"c2adae60-fe6c-41d3-9362-18e19f76630c\") " pod="openshift-route-controller-manager/route-controller-manager-7d895bb644-7m5t8" Oct 03 13:43:07 crc kubenswrapper[4861]: I1003 
13:43:07.450466 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bjx7b\" (UniqueName: \"kubernetes.io/projected/c2adae60-fe6c-41d3-9362-18e19f76630c-kube-api-access-bjx7b\") pod \"route-controller-manager-7d895bb644-7m5t8\" (UID: \"c2adae60-fe6c-41d3-9362-18e19f76630c\") " pod="openshift-route-controller-manager/route-controller-manager-7d895bb644-7m5t8" Oct 03 13:43:07 crc kubenswrapper[4861]: I1003 13:43:07.450487 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/a547e8df-59ec-47e6-95b6-3b1cc999f084-proxy-ca-bundles\") pod \"controller-manager-68fcd589d8-tqqvk\" (UID: \"a547e8df-59ec-47e6-95b6-3b1cc999f084\") " pod="openshift-controller-manager/controller-manager-68fcd589d8-tqqvk" Oct 03 13:43:07 crc kubenswrapper[4861]: I1003 13:43:07.450514 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c2adae60-fe6c-41d3-9362-18e19f76630c-config\") pod \"route-controller-manager-7d895bb644-7m5t8\" (UID: \"c2adae60-fe6c-41d3-9362-18e19f76630c\") " pod="openshift-route-controller-manager/route-controller-manager-7d895bb644-7m5t8" Oct 03 13:43:07 crc kubenswrapper[4861]: I1003 13:43:07.450541 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a547e8df-59ec-47e6-95b6-3b1cc999f084-client-ca\") pod \"controller-manager-68fcd589d8-tqqvk\" (UID: \"a547e8df-59ec-47e6-95b6-3b1cc999f084\") " pod="openshift-controller-manager/controller-manager-68fcd589d8-tqqvk" Oct 03 13:43:07 crc kubenswrapper[4861]: I1003 13:43:07.450561 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c2kw7\" (UniqueName: \"kubernetes.io/projected/a547e8df-59ec-47e6-95b6-3b1cc999f084-kube-api-access-c2kw7\") pod \"controller-manager-68fcd589d8-tqqvk\" (UID: \"a547e8df-59ec-47e6-95b6-3b1cc999f084\") " pod="openshift-controller-manager/controller-manager-68fcd589d8-tqqvk" Oct 03 13:43:07 crc kubenswrapper[4861]: I1003 13:43:07.451911 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a547e8df-59ec-47e6-95b6-3b1cc999f084-client-ca\") pod \"controller-manager-68fcd589d8-tqqvk\" (UID: \"a547e8df-59ec-47e6-95b6-3b1cc999f084\") " pod="openshift-controller-manager/controller-manager-68fcd589d8-tqqvk" Oct 03 13:43:07 crc kubenswrapper[4861]: I1003 13:43:07.452174 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c2adae60-fe6c-41d3-9362-18e19f76630c-config\") pod \"route-controller-manager-7d895bb644-7m5t8\" (UID: \"c2adae60-fe6c-41d3-9362-18e19f76630c\") " pod="openshift-route-controller-manager/route-controller-manager-7d895bb644-7m5t8" Oct 03 13:43:07 crc kubenswrapper[4861]: I1003 13:43:07.452176 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c2adae60-fe6c-41d3-9362-18e19f76630c-client-ca\") pod \"route-controller-manager-7d895bb644-7m5t8\" (UID: \"c2adae60-fe6c-41d3-9362-18e19f76630c\") " pod="openshift-route-controller-manager/route-controller-manager-7d895bb644-7m5t8" Oct 03 13:43:07 crc kubenswrapper[4861]: I1003 13:43:07.452776 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/a547e8df-59ec-47e6-95b6-3b1cc999f084-config\") pod \"controller-manager-68fcd589d8-tqqvk\" (UID: \"a547e8df-59ec-47e6-95b6-3b1cc999f084\") " pod="openshift-controller-manager/controller-manager-68fcd589d8-tqqvk" Oct 03 13:43:07 crc kubenswrapper[4861]: I1003 13:43:07.453035 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/a547e8df-59ec-47e6-95b6-3b1cc999f084-proxy-ca-bundles\") pod \"controller-manager-68fcd589d8-tqqvk\" (UID: \"a547e8df-59ec-47e6-95b6-3b1cc999f084\") " pod="openshift-controller-manager/controller-manager-68fcd589d8-tqqvk" Oct 03 13:43:07 crc kubenswrapper[4861]: I1003 13:43:07.454493 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a547e8df-59ec-47e6-95b6-3b1cc999f084-serving-cert\") pod \"controller-manager-68fcd589d8-tqqvk\" (UID: \"a547e8df-59ec-47e6-95b6-3b1cc999f084\") " pod="openshift-controller-manager/controller-manager-68fcd589d8-tqqvk" Oct 03 13:43:07 crc kubenswrapper[4861]: I1003 13:43:07.459924 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c2adae60-fe6c-41d3-9362-18e19f76630c-serving-cert\") pod \"route-controller-manager-7d895bb644-7m5t8\" (UID: \"c2adae60-fe6c-41d3-9362-18e19f76630c\") " pod="openshift-route-controller-manager/route-controller-manager-7d895bb644-7m5t8" Oct 03 13:43:07 crc kubenswrapper[4861]: I1003 13:43:07.470538 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bjx7b\" (UniqueName: \"kubernetes.io/projected/c2adae60-fe6c-41d3-9362-18e19f76630c-kube-api-access-bjx7b\") pod \"route-controller-manager-7d895bb644-7m5t8\" (UID: \"c2adae60-fe6c-41d3-9362-18e19f76630c\") " pod="openshift-route-controller-manager/route-controller-manager-7d895bb644-7m5t8" Oct 03 13:43:07 crc kubenswrapper[4861]: I1003 13:43:07.474792 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c2kw7\" (UniqueName: \"kubernetes.io/projected/a547e8df-59ec-47e6-95b6-3b1cc999f084-kube-api-access-c2kw7\") pod \"controller-manager-68fcd589d8-tqqvk\" (UID: \"a547e8df-59ec-47e6-95b6-3b1cc999f084\") " pod="openshift-controller-manager/controller-manager-68fcd589d8-tqqvk" Oct 03 13:43:07 crc kubenswrapper[4861]: I1003 13:43:07.551411 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-68fcd589d8-tqqvk" Oct 03 13:43:07 crc kubenswrapper[4861]: I1003 13:43:07.567143 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7d895bb644-7m5t8" Oct 03 13:43:07 crc kubenswrapper[4861]: I1003 13:43:07.982666 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7d895bb644-7m5t8"] Oct 03 13:43:07 crc kubenswrapper[4861]: I1003 13:43:07.988843 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-68fcd589d8-tqqvk"] Oct 03 13:43:08 crc kubenswrapper[4861]: I1003 13:43:08.195416 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7d895bb644-7m5t8" event={"ID":"c2adae60-fe6c-41d3-9362-18e19f76630c","Type":"ContainerStarted","Data":"ee74fff95de0fd92c0c78469912d3c52e2166a7797fc7d6d913d8aa8a12cdab8"} Oct 03 13:43:08 crc kubenswrapper[4861]: I1003 13:43:08.196666 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-68fcd589d8-tqqvk" event={"ID":"a547e8df-59ec-47e6-95b6-3b1cc999f084","Type":"ContainerStarted","Data":"6e95aabb9b9926ef0b5fd9670325f051b79eb43e20226e16d8e25578235b5bf1"} Oct 03 13:43:08 crc kubenswrapper[4861]: I1003 13:43:08.688397 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6f825716-7012-4f64-925d-fdf69a3b8b28" path="/var/lib/kubelet/pods/6f825716-7012-4f64-925d-fdf69a3b8b28/volumes" Oct 03 13:43:08 crc kubenswrapper[4861]: I1003 13:43:08.689341 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a6a84618-0674-4410-ab88-6896066b6077" path="/var/lib/kubelet/pods/a6a84618-0674-4410-ab88-6896066b6077/volumes" Oct 03 13:43:11 crc kubenswrapper[4861]: I1003 13:43:11.214888 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-68fcd589d8-tqqvk" event={"ID":"a547e8df-59ec-47e6-95b6-3b1cc999f084","Type":"ContainerStarted","Data":"2af144f6529e9b02e66e559e7ea318a0b51b84daeff7c70060270c3b77e51599"} Oct 03 13:43:12 crc kubenswrapper[4861]: I1003 13:43:12.229675 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7d895bb644-7m5t8" event={"ID":"c2adae60-fe6c-41d3-9362-18e19f76630c","Type":"ContainerStarted","Data":"6921fc6aa06cb258cd65e45f5cb32c7600eec816e5236e0147e47ff5196a12b0"} Oct 03 13:43:13 crc kubenswrapper[4861]: I1003 13:43:13.235048 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-68fcd589d8-tqqvk" Oct 03 13:43:13 crc kubenswrapper[4861]: I1003 13:43:13.244350 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-68fcd589d8-tqqvk" Oct 03 13:43:13 crc kubenswrapper[4861]: I1003 13:43:13.275429 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-68fcd589d8-tqqvk" podStartSLOduration=7.275406591 podStartE2EDuration="7.275406591s" podCreationTimestamp="2025-10-03 13:43:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:43:13.258646281 +0000 UTC m=+707.256631328" watchObservedRunningTime="2025-10-03 13:43:13.275406591 +0000 UTC m=+707.273391648" Oct 03 13:43:14 crc kubenswrapper[4861]: I1003 13:43:14.240161 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-route-controller-manager/route-controller-manager-7d895bb644-7m5t8" Oct 03 13:43:14 crc kubenswrapper[4861]: I1003 13:43:14.246314 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-7d895bb644-7m5t8" Oct 03 13:43:14 crc kubenswrapper[4861]: I1003 13:43:14.264027 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-7d895bb644-7m5t8" podStartSLOduration=8.264005697 podStartE2EDuration="8.264005697s" podCreationTimestamp="2025-10-03 13:43:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:43:14.262064016 +0000 UTC m=+708.260049073" watchObservedRunningTime="2025-10-03 13:43:14.264005697 +0000 UTC m=+708.261990744" Oct 03 13:43:16 crc kubenswrapper[4861]: I1003 13:43:16.643489 4861 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Oct 03 13:43:22 crc kubenswrapper[4861]: I1003 13:43:22.827245 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-ts4sc" Oct 03 13:43:30 crc kubenswrapper[4861]: I1003 13:43:30.144957 4861 patch_prober.go:28] interesting pod/machine-config-daemon-t9slw container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 13:43:30 crc kubenswrapper[4861]: I1003 13:43:30.145760 4861 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 13:43:30 crc kubenswrapper[4861]: I1003 13:43:30.900332 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ccx2jv"] Oct 03 13:43:30 crc kubenswrapper[4861]: I1003 13:43:30.901557 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ccx2jv" Oct 03 13:43:30 crc kubenswrapper[4861]: I1003 13:43:30.908118 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Oct 03 13:43:30 crc kubenswrapper[4861]: I1003 13:43:30.916729 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ccx2jv"] Oct 03 13:43:31 crc kubenswrapper[4861]: I1003 13:43:31.048658 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/297c9b28-92e3-4c8e-bb08-63af1b637718-util\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ccx2jv\" (UID: \"297c9b28-92e3-4c8e-bb08-63af1b637718\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ccx2jv" Oct 03 13:43:31 crc kubenswrapper[4861]: I1003 13:43:31.048721 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mckpl\" (UniqueName: \"kubernetes.io/projected/297c9b28-92e3-4c8e-bb08-63af1b637718-kube-api-access-mckpl\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ccx2jv\" (UID: \"297c9b28-92e3-4c8e-bb08-63af1b637718\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ccx2jv" Oct 03 13:43:31 crc kubenswrapper[4861]: I1003 13:43:31.048806 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/297c9b28-92e3-4c8e-bb08-63af1b637718-bundle\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ccx2jv\" (UID: \"297c9b28-92e3-4c8e-bb08-63af1b637718\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ccx2jv" Oct 03 13:43:31 crc kubenswrapper[4861]: I1003 13:43:31.149927 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mckpl\" (UniqueName: \"kubernetes.io/projected/297c9b28-92e3-4c8e-bb08-63af1b637718-kube-api-access-mckpl\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ccx2jv\" (UID: \"297c9b28-92e3-4c8e-bb08-63af1b637718\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ccx2jv" Oct 03 13:43:31 crc kubenswrapper[4861]: I1003 13:43:31.149991 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/297c9b28-92e3-4c8e-bb08-63af1b637718-util\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ccx2jv\" (UID: \"297c9b28-92e3-4c8e-bb08-63af1b637718\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ccx2jv" Oct 03 13:43:31 crc kubenswrapper[4861]: I1003 13:43:31.150031 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/297c9b28-92e3-4c8e-bb08-63af1b637718-bundle\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ccx2jv\" (UID: \"297c9b28-92e3-4c8e-bb08-63af1b637718\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ccx2jv" Oct 03 13:43:31 crc kubenswrapper[4861]: I1003 13:43:31.150583 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/297c9b28-92e3-4c8e-bb08-63af1b637718-util\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ccx2jv\" (UID: \"297c9b28-92e3-4c8e-bb08-63af1b637718\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ccx2jv" Oct 03 13:43:31 crc kubenswrapper[4861]: I1003 13:43:31.150652 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/297c9b28-92e3-4c8e-bb08-63af1b637718-bundle\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ccx2jv\" (UID: \"297c9b28-92e3-4c8e-bb08-63af1b637718\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ccx2jv" Oct 03 13:43:31 crc kubenswrapper[4861]: I1003 13:43:31.175073 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mckpl\" (UniqueName: \"kubernetes.io/projected/297c9b28-92e3-4c8e-bb08-63af1b637718-kube-api-access-mckpl\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ccx2jv\" (UID: \"297c9b28-92e3-4c8e-bb08-63af1b637718\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ccx2jv" Oct 03 13:43:31 crc kubenswrapper[4861]: I1003 13:43:31.218782 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ccx2jv" Oct 03 13:43:31 crc kubenswrapper[4861]: I1003 13:43:31.630183 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ccx2jv"] Oct 03 13:43:32 crc kubenswrapper[4861]: I1003 13:43:32.337205 4861 generic.go:334] "Generic (PLEG): container finished" podID="297c9b28-92e3-4c8e-bb08-63af1b637718" containerID="59c12b8aaf3571b93850b873e370e25f1b3e7a0fc65ebf4e369dba873b2250e1" exitCode=0 Oct 03 13:43:32 crc kubenswrapper[4861]: I1003 13:43:32.337264 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ccx2jv" event={"ID":"297c9b28-92e3-4c8e-bb08-63af1b637718","Type":"ContainerDied","Data":"59c12b8aaf3571b93850b873e370e25f1b3e7a0fc65ebf4e369dba873b2250e1"} Oct 03 13:43:32 crc kubenswrapper[4861]: I1003 13:43:32.337328 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ccx2jv" event={"ID":"297c9b28-92e3-4c8e-bb08-63af1b637718","Type":"ContainerStarted","Data":"b8fc4ccc7527f44ad8141acbae3dba0a823b9a4538f1042eefe86f197a9221e5"} Oct 03 13:43:33 crc kubenswrapper[4861]: I1003 13:43:33.201045 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-rb8d2"] Oct 03 13:43:33 crc kubenswrapper[4861]: I1003 13:43:33.202605 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-rb8d2" Oct 03 13:43:33 crc kubenswrapper[4861]: I1003 13:43:33.218123 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-rb8d2"] Oct 03 13:43:33 crc kubenswrapper[4861]: I1003 13:43:33.280316 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f9jc7\" (UniqueName: \"kubernetes.io/projected/496da619-6355-481d-8414-ea68f5e7685a-kube-api-access-f9jc7\") pod \"redhat-operators-rb8d2\" (UID: \"496da619-6355-481d-8414-ea68f5e7685a\") " pod="openshift-marketplace/redhat-operators-rb8d2" Oct 03 13:43:33 crc kubenswrapper[4861]: I1003 13:43:33.280362 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/496da619-6355-481d-8414-ea68f5e7685a-catalog-content\") pod \"redhat-operators-rb8d2\" (UID: \"496da619-6355-481d-8414-ea68f5e7685a\") " pod="openshift-marketplace/redhat-operators-rb8d2" Oct 03 13:43:33 crc kubenswrapper[4861]: I1003 13:43:33.280403 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/496da619-6355-481d-8414-ea68f5e7685a-utilities\") pod \"redhat-operators-rb8d2\" (UID: \"496da619-6355-481d-8414-ea68f5e7685a\") " pod="openshift-marketplace/redhat-operators-rb8d2" Oct 03 13:43:33 crc kubenswrapper[4861]: I1003 13:43:33.381367 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/496da619-6355-481d-8414-ea68f5e7685a-catalog-content\") pod \"redhat-operators-rb8d2\" (UID: \"496da619-6355-481d-8414-ea68f5e7685a\") " pod="openshift-marketplace/redhat-operators-rb8d2" Oct 03 13:43:33 crc kubenswrapper[4861]: I1003 13:43:33.381457 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/496da619-6355-481d-8414-ea68f5e7685a-utilities\") pod \"redhat-operators-rb8d2\" (UID: \"496da619-6355-481d-8414-ea68f5e7685a\") " pod="openshift-marketplace/redhat-operators-rb8d2" Oct 03 13:43:33 crc kubenswrapper[4861]: I1003 13:43:33.381534 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f9jc7\" (UniqueName: \"kubernetes.io/projected/496da619-6355-481d-8414-ea68f5e7685a-kube-api-access-f9jc7\") pod \"redhat-operators-rb8d2\" (UID: \"496da619-6355-481d-8414-ea68f5e7685a\") " pod="openshift-marketplace/redhat-operators-rb8d2" Oct 03 13:43:33 crc kubenswrapper[4861]: I1003 13:43:33.382410 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/496da619-6355-481d-8414-ea68f5e7685a-catalog-content\") pod \"redhat-operators-rb8d2\" (UID: \"496da619-6355-481d-8414-ea68f5e7685a\") " pod="openshift-marketplace/redhat-operators-rb8d2" Oct 03 13:43:33 crc kubenswrapper[4861]: I1003 13:43:33.382516 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/496da619-6355-481d-8414-ea68f5e7685a-utilities\") pod \"redhat-operators-rb8d2\" (UID: \"496da619-6355-481d-8414-ea68f5e7685a\") " pod="openshift-marketplace/redhat-operators-rb8d2" Oct 03 13:43:33 crc kubenswrapper[4861]: I1003 13:43:33.404881 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-f9jc7\" (UniqueName: \"kubernetes.io/projected/496da619-6355-481d-8414-ea68f5e7685a-kube-api-access-f9jc7\") pod \"redhat-operators-rb8d2\" (UID: \"496da619-6355-481d-8414-ea68f5e7685a\") " pod="openshift-marketplace/redhat-operators-rb8d2" Oct 03 13:43:33 crc kubenswrapper[4861]: I1003 13:43:33.527177 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rb8d2" Oct 03 13:43:33 crc kubenswrapper[4861]: I1003 13:43:33.955596 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-rb8d2"] Oct 03 13:43:34 crc kubenswrapper[4861]: I1003 13:43:34.347630 4861 generic.go:334] "Generic (PLEG): container finished" podID="496da619-6355-481d-8414-ea68f5e7685a" containerID="c0d243c4a8fbea7d64063096e74b91a1f6b0553650ca51ca68d9468425e7a393" exitCode=0 Oct 03 13:43:34 crc kubenswrapper[4861]: I1003 13:43:34.347726 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rb8d2" event={"ID":"496da619-6355-481d-8414-ea68f5e7685a","Type":"ContainerDied","Data":"c0d243c4a8fbea7d64063096e74b91a1f6b0553650ca51ca68d9468425e7a393"} Oct 03 13:43:34 crc kubenswrapper[4861]: I1003 13:43:34.347758 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rb8d2" event={"ID":"496da619-6355-481d-8414-ea68f5e7685a","Type":"ContainerStarted","Data":"af06e049c7917fe9b68fa984e0f18e5819539d7c1d7ffc5395fa4311caac4a8a"} Oct 03 13:43:34 crc kubenswrapper[4861]: I1003 13:43:34.350446 4861 generic.go:334] "Generic (PLEG): container finished" podID="297c9b28-92e3-4c8e-bb08-63af1b637718" containerID="5ca756a2726c23c56b99c86672f126dac4475a7eb6c8abe8426b4f43a51438e4" exitCode=0 Oct 03 13:43:34 crc kubenswrapper[4861]: I1003 13:43:34.350483 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ccx2jv" event={"ID":"297c9b28-92e3-4c8e-bb08-63af1b637718","Type":"ContainerDied","Data":"5ca756a2726c23c56b99c86672f126dac4475a7eb6c8abe8426b4f43a51438e4"} Oct 03 13:43:35 crc kubenswrapper[4861]: I1003 13:43:35.359201 4861 generic.go:334] "Generic (PLEG): container finished" podID="297c9b28-92e3-4c8e-bb08-63af1b637718" containerID="97e4671c4af8030797bcae0c7db5f9b76ee6c5e9948f473b609f70983e59d82a" exitCode=0 Oct 03 13:43:35 crc kubenswrapper[4861]: I1003 13:43:35.359282 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ccx2jv" event={"ID":"297c9b28-92e3-4c8e-bb08-63af1b637718","Type":"ContainerDied","Data":"97e4671c4af8030797bcae0c7db5f9b76ee6c5e9948f473b609f70983e59d82a"} Oct 03 13:43:35 crc kubenswrapper[4861]: I1003 13:43:35.361383 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rb8d2" event={"ID":"496da619-6355-481d-8414-ea68f5e7685a","Type":"ContainerStarted","Data":"dcfd744d4f5b0a7ff0aff43aee854aaff541908b4d7298bae8663d836bcc7a45"} Oct 03 13:43:36 crc kubenswrapper[4861]: I1003 13:43:36.369343 4861 generic.go:334] "Generic (PLEG): container finished" podID="496da619-6355-481d-8414-ea68f5e7685a" containerID="dcfd744d4f5b0a7ff0aff43aee854aaff541908b4d7298bae8663d836bcc7a45" exitCode=0 Oct 03 13:43:36 crc kubenswrapper[4861]: I1003 13:43:36.369413 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rb8d2" 
event={"ID":"496da619-6355-481d-8414-ea68f5e7685a","Type":"ContainerDied","Data":"dcfd744d4f5b0a7ff0aff43aee854aaff541908b4d7298bae8663d836bcc7a45"} Oct 03 13:43:36 crc kubenswrapper[4861]: I1003 13:43:36.719749 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ccx2jv" Oct 03 13:43:36 crc kubenswrapper[4861]: I1003 13:43:36.820754 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/297c9b28-92e3-4c8e-bb08-63af1b637718-util\") pod \"297c9b28-92e3-4c8e-bb08-63af1b637718\" (UID: \"297c9b28-92e3-4c8e-bb08-63af1b637718\") " Oct 03 13:43:36 crc kubenswrapper[4861]: I1003 13:43:36.820810 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mckpl\" (UniqueName: \"kubernetes.io/projected/297c9b28-92e3-4c8e-bb08-63af1b637718-kube-api-access-mckpl\") pod \"297c9b28-92e3-4c8e-bb08-63af1b637718\" (UID: \"297c9b28-92e3-4c8e-bb08-63af1b637718\") " Oct 03 13:43:36 crc kubenswrapper[4861]: I1003 13:43:36.820839 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/297c9b28-92e3-4c8e-bb08-63af1b637718-bundle\") pod \"297c9b28-92e3-4c8e-bb08-63af1b637718\" (UID: \"297c9b28-92e3-4c8e-bb08-63af1b637718\") " Oct 03 13:43:36 crc kubenswrapper[4861]: I1003 13:43:36.821619 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/297c9b28-92e3-4c8e-bb08-63af1b637718-bundle" (OuterVolumeSpecName: "bundle") pod "297c9b28-92e3-4c8e-bb08-63af1b637718" (UID: "297c9b28-92e3-4c8e-bb08-63af1b637718"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:43:36 crc kubenswrapper[4861]: I1003 13:43:36.834516 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/297c9b28-92e3-4c8e-bb08-63af1b637718-kube-api-access-mckpl" (OuterVolumeSpecName: "kube-api-access-mckpl") pod "297c9b28-92e3-4c8e-bb08-63af1b637718" (UID: "297c9b28-92e3-4c8e-bb08-63af1b637718"). InnerVolumeSpecName "kube-api-access-mckpl". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:43:36 crc kubenswrapper[4861]: I1003 13:43:36.838492 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/297c9b28-92e3-4c8e-bb08-63af1b637718-util" (OuterVolumeSpecName: "util") pod "297c9b28-92e3-4c8e-bb08-63af1b637718" (UID: "297c9b28-92e3-4c8e-bb08-63af1b637718"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:43:36 crc kubenswrapper[4861]: I1003 13:43:36.921958 4861 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/297c9b28-92e3-4c8e-bb08-63af1b637718-util\") on node \"crc\" DevicePath \"\"" Oct 03 13:43:36 crc kubenswrapper[4861]: I1003 13:43:36.921998 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mckpl\" (UniqueName: \"kubernetes.io/projected/297c9b28-92e3-4c8e-bb08-63af1b637718-kube-api-access-mckpl\") on node \"crc\" DevicePath \"\"" Oct 03 13:43:36 crc kubenswrapper[4861]: I1003 13:43:36.922010 4861 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/297c9b28-92e3-4c8e-bb08-63af1b637718-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 13:43:37 crc kubenswrapper[4861]: I1003 13:43:37.376379 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rb8d2" event={"ID":"496da619-6355-481d-8414-ea68f5e7685a","Type":"ContainerStarted","Data":"141a6e36d974bc9c702dad707fe17a429155abc74d8c7fa5c4a31ad794d7efea"} Oct 03 13:43:37 crc kubenswrapper[4861]: I1003 13:43:37.379214 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ccx2jv" event={"ID":"297c9b28-92e3-4c8e-bb08-63af1b637718","Type":"ContainerDied","Data":"b8fc4ccc7527f44ad8141acbae3dba0a823b9a4538f1042eefe86f197a9221e5"} Oct 03 13:43:37 crc kubenswrapper[4861]: I1003 13:43:37.379406 4861 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b8fc4ccc7527f44ad8141acbae3dba0a823b9a4538f1042eefe86f197a9221e5" Oct 03 13:43:37 crc kubenswrapper[4861]: I1003 13:43:37.379281 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ccx2jv" Oct 03 13:43:37 crc kubenswrapper[4861]: I1003 13:43:37.403287 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-rb8d2" podStartSLOduration=1.884843254 podStartE2EDuration="4.40326796s" podCreationTimestamp="2025-10-03 13:43:33 +0000 UTC" firstStartedPulling="2025-10-03 13:43:34.348990134 +0000 UTC m=+728.346975181" lastFinishedPulling="2025-10-03 13:43:36.86741485 +0000 UTC m=+730.865399887" observedRunningTime="2025-10-03 13:43:37.399993713 +0000 UTC m=+731.397978770" watchObservedRunningTime="2025-10-03 13:43:37.40326796 +0000 UTC m=+731.401253017" Oct 03 13:43:38 crc kubenswrapper[4861]: I1003 13:43:38.700843 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-858ddd8f98-kxqsn"] Oct 03 13:43:38 crc kubenswrapper[4861]: E1003 13:43:38.701140 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="297c9b28-92e3-4c8e-bb08-63af1b637718" containerName="extract" Oct 03 13:43:38 crc kubenswrapper[4861]: I1003 13:43:38.701155 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="297c9b28-92e3-4c8e-bb08-63af1b637718" containerName="extract" Oct 03 13:43:38 crc kubenswrapper[4861]: E1003 13:43:38.701174 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="297c9b28-92e3-4c8e-bb08-63af1b637718" containerName="util" Oct 03 13:43:38 crc kubenswrapper[4861]: I1003 13:43:38.701183 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="297c9b28-92e3-4c8e-bb08-63af1b637718" containerName="util" Oct 03 13:43:38 crc kubenswrapper[4861]: E1003 13:43:38.701198 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="297c9b28-92e3-4c8e-bb08-63af1b637718" containerName="pull" Oct 03 13:43:38 crc kubenswrapper[4861]: I1003 13:43:38.701206 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="297c9b28-92e3-4c8e-bb08-63af1b637718" containerName="pull" Oct 03 13:43:38 crc kubenswrapper[4861]: I1003 13:43:38.701354 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="297c9b28-92e3-4c8e-bb08-63af1b637718" containerName="extract" Oct 03 13:43:38 crc kubenswrapper[4861]: I1003 13:43:38.701797 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-858ddd8f98-kxqsn" Oct 03 13:43:38 crc kubenswrapper[4861]: I1003 13:43:38.706190 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt" Oct 03 13:43:38 crc kubenswrapper[4861]: I1003 13:43:38.709152 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-8rktp" Oct 03 13:43:38 crc kubenswrapper[4861]: I1003 13:43:38.711777 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt" Oct 03 13:43:38 crc kubenswrapper[4861]: I1003 13:43:38.730098 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-858ddd8f98-kxqsn"] Oct 03 13:43:38 crc kubenswrapper[4861]: I1003 13:43:38.849572 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j6c46\" (UniqueName: \"kubernetes.io/projected/28617d69-b62d-41a8-bb48-d89be9a37676-kube-api-access-j6c46\") pod \"nmstate-operator-858ddd8f98-kxqsn\" (UID: \"28617d69-b62d-41a8-bb48-d89be9a37676\") " pod="openshift-nmstate/nmstate-operator-858ddd8f98-kxqsn" Oct 03 13:43:38 crc kubenswrapper[4861]: I1003 13:43:38.950615 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j6c46\" (UniqueName: \"kubernetes.io/projected/28617d69-b62d-41a8-bb48-d89be9a37676-kube-api-access-j6c46\") pod \"nmstate-operator-858ddd8f98-kxqsn\" (UID: \"28617d69-b62d-41a8-bb48-d89be9a37676\") " pod="openshift-nmstate/nmstate-operator-858ddd8f98-kxqsn" Oct 03 13:43:38 crc kubenswrapper[4861]: I1003 13:43:38.975041 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j6c46\" (UniqueName: \"kubernetes.io/projected/28617d69-b62d-41a8-bb48-d89be9a37676-kube-api-access-j6c46\") pod \"nmstate-operator-858ddd8f98-kxqsn\" (UID: \"28617d69-b62d-41a8-bb48-d89be9a37676\") " pod="openshift-nmstate/nmstate-operator-858ddd8f98-kxqsn" Oct 03 13:43:39 crc kubenswrapper[4861]: I1003 13:43:39.018325 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-858ddd8f98-kxqsn" Oct 03 13:43:39 crc kubenswrapper[4861]: I1003 13:43:39.471628 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-858ddd8f98-kxqsn"] Oct 03 13:43:39 crc kubenswrapper[4861]: W1003 13:43:39.492112 4861 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod28617d69_b62d_41a8_bb48_d89be9a37676.slice/crio-cbeae0fdaab5e487ce226155f343c8ce27f69f39de7d6ebb57984cf25a662404 WatchSource:0}: Error finding container cbeae0fdaab5e487ce226155f343c8ce27f69f39de7d6ebb57984cf25a662404: Status 404 returned error can't find the container with id cbeae0fdaab5e487ce226155f343c8ce27f69f39de7d6ebb57984cf25a662404 Oct 03 13:43:40 crc kubenswrapper[4861]: I1003 13:43:40.394503 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-858ddd8f98-kxqsn" event={"ID":"28617d69-b62d-41a8-bb48-d89be9a37676","Type":"ContainerStarted","Data":"cbeae0fdaab5e487ce226155f343c8ce27f69f39de7d6ebb57984cf25a662404"} Oct 03 13:43:43 crc kubenswrapper[4861]: I1003 13:43:43.411342 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-858ddd8f98-kxqsn" event={"ID":"28617d69-b62d-41a8-bb48-d89be9a37676","Type":"ContainerStarted","Data":"f2963b7ebd94840e93bffcd58266265f2132ff2e041d42afd889786fd1db8c27"} Oct 03 13:43:43 crc kubenswrapper[4861]: I1003 13:43:43.432269 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-858ddd8f98-kxqsn" podStartSLOduration=2.01703344 podStartE2EDuration="5.432220867s" podCreationTimestamp="2025-10-03 13:43:38 +0000 UTC" firstStartedPulling="2025-10-03 13:43:39.495007109 +0000 UTC m=+733.492992156" lastFinishedPulling="2025-10-03 13:43:42.910194536 +0000 UTC m=+736.908179583" observedRunningTime="2025-10-03 13:43:43.427858311 +0000 UTC m=+737.425843368" watchObservedRunningTime="2025-10-03 13:43:43.432220867 +0000 UTC m=+737.430205914" Oct 03 13:43:43 crc kubenswrapper[4861]: I1003 13:43:43.528303 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-rb8d2" Oct 03 13:43:43 crc kubenswrapper[4861]: I1003 13:43:43.528381 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-rb8d2" Oct 03 13:43:43 crc kubenswrapper[4861]: I1003 13:43:43.569695 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-rb8d2" Oct 03 13:43:44 crc kubenswrapper[4861]: I1003 13:43:44.306325 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-fdff9cb8d-b4mxl"] Oct 03 13:43:44 crc kubenswrapper[4861]: I1003 13:43:44.307206 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-b4mxl" Oct 03 13:43:44 crc kubenswrapper[4861]: I1003 13:43:44.310891 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-wm7bg" Oct 03 13:43:44 crc kubenswrapper[4861]: I1003 13:43:44.326819 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-fdff9cb8d-b4mxl"] Oct 03 13:43:44 crc kubenswrapper[4861]: I1003 13:43:44.330886 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-6cdbc54649-v6q9r"] Oct 03 13:43:44 crc kubenswrapper[4861]: I1003 13:43:44.331729 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-v6q9r" Oct 03 13:43:44 crc kubenswrapper[4861]: I1003 13:43:44.333853 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook" Oct 03 13:43:44 crc kubenswrapper[4861]: I1003 13:43:44.349919 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-thvxb"] Oct 03 13:43:44 crc kubenswrapper[4861]: I1003 13:43:44.350957 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-thvxb" Oct 03 13:43:44 crc kubenswrapper[4861]: I1003 13:43:44.373504 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-6cdbc54649-v6q9r"] Oct 03 13:43:44 crc kubenswrapper[4861]: I1003 13:43:44.422101 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kfr5d\" (UniqueName: \"kubernetes.io/projected/cecb92af-dba6-4e7b-825c-97d7fcd5cc5d-kube-api-access-kfr5d\") pod \"nmstate-webhook-6cdbc54649-v6q9r\" (UID: \"cecb92af-dba6-4e7b-825c-97d7fcd5cc5d\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-v6q9r" Oct 03 13:43:44 crc kubenswrapper[4861]: I1003 13:43:44.422172 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/c1ec679d-6458-489b-bdb3-6c6ec465d695-ovs-socket\") pod \"nmstate-handler-thvxb\" (UID: \"c1ec679d-6458-489b-bdb3-6c6ec465d695\") " pod="openshift-nmstate/nmstate-handler-thvxb" Oct 03 13:43:44 crc kubenswrapper[4861]: I1003 13:43:44.422276 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/cecb92af-dba6-4e7b-825c-97d7fcd5cc5d-tls-key-pair\") pod \"nmstate-webhook-6cdbc54649-v6q9r\" (UID: \"cecb92af-dba6-4e7b-825c-97d7fcd5cc5d\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-v6q9r" Oct 03 13:43:44 crc kubenswrapper[4861]: I1003 13:43:44.422307 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z8q5c\" (UniqueName: \"kubernetes.io/projected/74a61ffa-6414-4072-952f-d3a9e5df2cad-kube-api-access-z8q5c\") pod \"nmstate-metrics-fdff9cb8d-b4mxl\" (UID: \"74a61ffa-6414-4072-952f-d3a9e5df2cad\") " pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-b4mxl" Oct 03 13:43:44 crc kubenswrapper[4861]: I1003 13:43:44.422359 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/c1ec679d-6458-489b-bdb3-6c6ec465d695-dbus-socket\") pod \"nmstate-handler-thvxb\" (UID: \"c1ec679d-6458-489b-bdb3-6c6ec465d695\") " 
pod="openshift-nmstate/nmstate-handler-thvxb" Oct 03 13:43:44 crc kubenswrapper[4861]: I1003 13:43:44.422388 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4vhfj\" (UniqueName: \"kubernetes.io/projected/c1ec679d-6458-489b-bdb3-6c6ec465d695-kube-api-access-4vhfj\") pod \"nmstate-handler-thvxb\" (UID: \"c1ec679d-6458-489b-bdb3-6c6ec465d695\") " pod="openshift-nmstate/nmstate-handler-thvxb" Oct 03 13:43:44 crc kubenswrapper[4861]: I1003 13:43:44.422495 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/c1ec679d-6458-489b-bdb3-6c6ec465d695-nmstate-lock\") pod \"nmstate-handler-thvxb\" (UID: \"c1ec679d-6458-489b-bdb3-6c6ec465d695\") " pod="openshift-nmstate/nmstate-handler-thvxb" Oct 03 13:43:44 crc kubenswrapper[4861]: I1003 13:43:44.473158 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-rb8d2" Oct 03 13:43:44 crc kubenswrapper[4861]: I1003 13:43:44.501284 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-6b874cbd85-jmr9f"] Oct 03 13:43:44 crc kubenswrapper[4861]: I1003 13:43:44.502750 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-jmr9f" Oct 03 13:43:44 crc kubenswrapper[4861]: I1003 13:43:44.510103 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert" Oct 03 13:43:44 crc kubenswrapper[4861]: I1003 13:43:44.510224 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-wjkvd" Oct 03 13:43:44 crc kubenswrapper[4861]: I1003 13:43:44.510357 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf" Oct 03 13:43:44 crc kubenswrapper[4861]: I1003 13:43:44.523110 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/cecb92af-dba6-4e7b-825c-97d7fcd5cc5d-tls-key-pair\") pod \"nmstate-webhook-6cdbc54649-v6q9r\" (UID: \"cecb92af-dba6-4e7b-825c-97d7fcd5cc5d\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-v6q9r" Oct 03 13:43:44 crc kubenswrapper[4861]: I1003 13:43:44.523161 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z8q5c\" (UniqueName: \"kubernetes.io/projected/74a61ffa-6414-4072-952f-d3a9e5df2cad-kube-api-access-z8q5c\") pod \"nmstate-metrics-fdff9cb8d-b4mxl\" (UID: \"74a61ffa-6414-4072-952f-d3a9e5df2cad\") " pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-b4mxl" Oct 03 13:43:44 crc kubenswrapper[4861]: I1003 13:43:44.523185 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/c1ec679d-6458-489b-bdb3-6c6ec465d695-dbus-socket\") pod \"nmstate-handler-thvxb\" (UID: \"c1ec679d-6458-489b-bdb3-6c6ec465d695\") " pod="openshift-nmstate/nmstate-handler-thvxb" Oct 03 13:43:44 crc kubenswrapper[4861]: I1003 13:43:44.523209 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4vhfj\" (UniqueName: \"kubernetes.io/projected/c1ec679d-6458-489b-bdb3-6c6ec465d695-kube-api-access-4vhfj\") pod \"nmstate-handler-thvxb\" (UID: \"c1ec679d-6458-489b-bdb3-6c6ec465d695\") " pod="openshift-nmstate/nmstate-handler-thvxb" Oct 03 13:43:44 crc 
kubenswrapper[4861]: I1003 13:43:44.523297 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/c1ec679d-6458-489b-bdb3-6c6ec465d695-nmstate-lock\") pod \"nmstate-handler-thvxb\" (UID: \"c1ec679d-6458-489b-bdb3-6c6ec465d695\") " pod="openshift-nmstate/nmstate-handler-thvxb" Oct 03 13:43:44 crc kubenswrapper[4861]: E1003 13:43:44.523310 4861 secret.go:188] Couldn't get secret openshift-nmstate/openshift-nmstate-webhook: secret "openshift-nmstate-webhook" not found Oct 03 13:43:44 crc kubenswrapper[4861]: E1003 13:43:44.523386 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/cecb92af-dba6-4e7b-825c-97d7fcd5cc5d-tls-key-pair podName:cecb92af-dba6-4e7b-825c-97d7fcd5cc5d nodeName:}" failed. No retries permitted until 2025-10-03 13:43:45.023364893 +0000 UTC m=+739.021350000 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "tls-key-pair" (UniqueName: "kubernetes.io/secret/cecb92af-dba6-4e7b-825c-97d7fcd5cc5d-tls-key-pair") pod "nmstate-webhook-6cdbc54649-v6q9r" (UID: "cecb92af-dba6-4e7b-825c-97d7fcd5cc5d") : secret "openshift-nmstate-webhook" not found Oct 03 13:43:44 crc kubenswrapper[4861]: I1003 13:43:44.523324 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kfr5d\" (UniqueName: \"kubernetes.io/projected/cecb92af-dba6-4e7b-825c-97d7fcd5cc5d-kube-api-access-kfr5d\") pod \"nmstate-webhook-6cdbc54649-v6q9r\" (UID: \"cecb92af-dba6-4e7b-825c-97d7fcd5cc5d\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-v6q9r" Oct 03 13:43:44 crc kubenswrapper[4861]: I1003 13:43:44.523569 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/c1ec679d-6458-489b-bdb3-6c6ec465d695-ovs-socket\") pod \"nmstate-handler-thvxb\" (UID: \"c1ec679d-6458-489b-bdb3-6c6ec465d695\") " pod="openshift-nmstate/nmstate-handler-thvxb" Oct 03 13:43:44 crc kubenswrapper[4861]: I1003 13:43:44.523735 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/c1ec679d-6458-489b-bdb3-6c6ec465d695-ovs-socket\") pod \"nmstate-handler-thvxb\" (UID: \"c1ec679d-6458-489b-bdb3-6c6ec465d695\") " pod="openshift-nmstate/nmstate-handler-thvxb" Oct 03 13:43:44 crc kubenswrapper[4861]: I1003 13:43:44.524302 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/c1ec679d-6458-489b-bdb3-6c6ec465d695-nmstate-lock\") pod \"nmstate-handler-thvxb\" (UID: \"c1ec679d-6458-489b-bdb3-6c6ec465d695\") " pod="openshift-nmstate/nmstate-handler-thvxb" Oct 03 13:43:44 crc kubenswrapper[4861]: I1003 13:43:44.524487 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/c1ec679d-6458-489b-bdb3-6c6ec465d695-dbus-socket\") pod \"nmstate-handler-thvxb\" (UID: \"c1ec679d-6458-489b-bdb3-6c6ec465d695\") " pod="openshift-nmstate/nmstate-handler-thvxb" Oct 03 13:43:44 crc kubenswrapper[4861]: I1003 13:43:44.555527 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4vhfj\" (UniqueName: \"kubernetes.io/projected/c1ec679d-6458-489b-bdb3-6c6ec465d695-kube-api-access-4vhfj\") pod \"nmstate-handler-thvxb\" (UID: \"c1ec679d-6458-489b-bdb3-6c6ec465d695\") " pod="openshift-nmstate/nmstate-handler-thvxb" Oct 03 13:43:44 crc kubenswrapper[4861]: I1003 
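The nestedpendingoperations entry above shows the volume manager's failure backoff: the first failed SetUp produces "durationBeforeRetry 500ms", so "No retries permitted until" is the failure time plus roughly 500ms (13:43:44.523 + 0.5s ≈ 13:43:45.023, as logged). On repeated failures the delay grows up to a cap; here the secret appeared before a second retry was needed (the retried mount succeeds at 13:43:45.045 below). A sketch of that shape, stdlib only — the initial 500ms is read from the log, but the doubling factor and the 2-minute cap are assumptions, not values taken from this log:

package main

import (
	"fmt"
	"time"
)

// backoff mimics the retry gating visible above: after each consecutive
// error, retries are forbidden until lastErr + delay, with delay growing.
type backoff struct {
	delay   time.Duration
	lastErr time.Time
}

func (b *backoff) onError(now time.Time) time.Time {
	if b.delay == 0 {
		b.delay = 500 * time.Millisecond // "durationBeforeRetry 500ms"
	} else {
		b.delay *= 2 // assumed growth factor
		if cap := 2 * time.Minute; b.delay > cap {
			b.delay = cap // assumed cap
		}
	}
	b.lastErr = now
	return now.Add(b.delay) // "No retries permitted until ..."
}

func (b *backoff) retryAllowed(now time.Time) bool {
	return now.After(b.lastErr.Add(b.delay))
}

func main() {
	var b backoff
	t0 := time.Date(2025, time.October, 3, 13, 43, 44, 523386000, time.UTC)
	fmt.Println(b.onError(t0))                                  // ~13:43:45.023 UTC
	fmt.Println(b.retryAllowed(t0.Add(600 * time.Millisecond))) // true
}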
Oct 03 13:43:44 crc kubenswrapper[4861]: I1003 13:43:44.557134 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kfr5d\" (UniqueName: \"kubernetes.io/projected/cecb92af-dba6-4e7b-825c-97d7fcd5cc5d-kube-api-access-kfr5d\") pod \"nmstate-webhook-6cdbc54649-v6q9r\" (UID: \"cecb92af-dba6-4e7b-825c-97d7fcd5cc5d\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-v6q9r"
Oct 03 13:43:44 crc kubenswrapper[4861]: I1003 13:43:44.568625 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z8q5c\" (UniqueName: \"kubernetes.io/projected/74a61ffa-6414-4072-952f-d3a9e5df2cad-kube-api-access-z8q5c\") pod \"nmstate-metrics-fdff9cb8d-b4mxl\" (UID: \"74a61ffa-6414-4072-952f-d3a9e5df2cad\") " pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-b4mxl"
Oct 03 13:43:44 crc kubenswrapper[4861]: I1003 13:43:44.585666 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-6b874cbd85-jmr9f"]
Oct 03 13:43:44 crc kubenswrapper[4861]: I1003 13:43:44.622502 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-b4mxl"
Oct 03 13:43:44 crc kubenswrapper[4861]: I1003 13:43:44.628368 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z4f6s\" (UniqueName: \"kubernetes.io/projected/767a82d2-0bea-436b-b63a-c5bbf0de86b8-kube-api-access-z4f6s\") pod \"nmstate-console-plugin-6b874cbd85-jmr9f\" (UID: \"767a82d2-0bea-436b-b63a-c5bbf0de86b8\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-jmr9f"
Oct 03 13:43:44 crc kubenswrapper[4861]: I1003 13:43:44.629556 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/767a82d2-0bea-436b-b63a-c5bbf0de86b8-nginx-conf\") pod \"nmstate-console-plugin-6b874cbd85-jmr9f\" (UID: \"767a82d2-0bea-436b-b63a-c5bbf0de86b8\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-jmr9f"
Oct 03 13:43:44 crc kubenswrapper[4861]: I1003 13:43:44.629645 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/767a82d2-0bea-436b-b63a-c5bbf0de86b8-plugin-serving-cert\") pod \"nmstate-console-plugin-6b874cbd85-jmr9f\" (UID: \"767a82d2-0bea-436b-b63a-c5bbf0de86b8\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-jmr9f"
Oct 03 13:43:44 crc kubenswrapper[4861]: I1003 13:43:44.673546 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-thvxb"
Oct 03 13:43:44 crc kubenswrapper[4861]: W1003 13:43:44.726789 4861 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc1ec679d_6458_489b_bdb3_6c6ec465d695.slice/crio-f430d69320bdeeb386ff373f2fa4d05ae16d2cb2a05e6c40c8b5fadc252804a5 WatchSource:0}: Error finding container f430d69320bdeeb386ff373f2fa4d05ae16d2cb2a05e6c40c8b5fadc252804a5: Status 404 returned error can't find the container with id f430d69320bdeeb386ff373f2fa4d05ae16d2cb2a05e6c40c8b5fadc252804a5
Oct 03 13:43:44 crc kubenswrapper[4861]: I1003 13:43:44.731115 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z4f6s\" (UniqueName: \"kubernetes.io/projected/767a82d2-0bea-436b-b63a-c5bbf0de86b8-kube-api-access-z4f6s\") pod \"nmstate-console-plugin-6b874cbd85-jmr9f\" (UID: \"767a82d2-0bea-436b-b63a-c5bbf0de86b8\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-jmr9f"
Oct 03 13:43:44 crc kubenswrapper[4861]: I1003 13:43:44.731437 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/767a82d2-0bea-436b-b63a-c5bbf0de86b8-nginx-conf\") pod \"nmstate-console-plugin-6b874cbd85-jmr9f\" (UID: \"767a82d2-0bea-436b-b63a-c5bbf0de86b8\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-jmr9f"
Oct 03 13:43:44 crc kubenswrapper[4861]: I1003 13:43:44.731560 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/767a82d2-0bea-436b-b63a-c5bbf0de86b8-plugin-serving-cert\") pod \"nmstate-console-plugin-6b874cbd85-jmr9f\" (UID: \"767a82d2-0bea-436b-b63a-c5bbf0de86b8\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-jmr9f"
Oct 03 13:43:44 crc kubenswrapper[4861]: I1003 13:43:44.733113 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/767a82d2-0bea-436b-b63a-c5bbf0de86b8-nginx-conf\") pod \"nmstate-console-plugin-6b874cbd85-jmr9f\" (UID: \"767a82d2-0bea-436b-b63a-c5bbf0de86b8\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-jmr9f"
Oct 03 13:43:44 crc kubenswrapper[4861]: I1003 13:43:44.740123 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/767a82d2-0bea-436b-b63a-c5bbf0de86b8-plugin-serving-cert\") pod \"nmstate-console-plugin-6b874cbd85-jmr9f\" (UID: \"767a82d2-0bea-436b-b63a-c5bbf0de86b8\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-jmr9f"
Oct 03 13:43:44 crc kubenswrapper[4861]: I1003 13:43:44.763423 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-5557b87f9c-zzzj9"]
Oct 03 13:43:44 crc kubenswrapper[4861]: I1003 13:43:44.764436 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-5557b87f9c-zzzj9"
Oct 03 13:43:44 crc kubenswrapper[4861]: I1003 13:43:44.774520 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z4f6s\" (UniqueName: \"kubernetes.io/projected/767a82d2-0bea-436b-b63a-c5bbf0de86b8-kube-api-access-z4f6s\") pod \"nmstate-console-plugin-6b874cbd85-jmr9f\" (UID: \"767a82d2-0bea-436b-b63a-c5bbf0de86b8\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-jmr9f"
Oct 03 13:43:44 crc kubenswrapper[4861]: I1003 13:43:44.780107 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-5557b87f9c-zzzj9"]
Oct 03 13:43:44 crc kubenswrapper[4861]: I1003 13:43:44.823275 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-jmr9f"
Oct 03 13:43:44 crc kubenswrapper[4861]: I1003 13:43:44.835145 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/2a5db69d-e844-4484-978a-bc7461fcd2f3-console-oauth-config\") pod \"console-5557b87f9c-zzzj9\" (UID: \"2a5db69d-e844-4484-978a-bc7461fcd2f3\") " pod="openshift-console/console-5557b87f9c-zzzj9"
Oct 03 13:43:44 crc kubenswrapper[4861]: I1003 13:43:44.835314 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vh86d\" (UniqueName: \"kubernetes.io/projected/2a5db69d-e844-4484-978a-bc7461fcd2f3-kube-api-access-vh86d\") pod \"console-5557b87f9c-zzzj9\" (UID: \"2a5db69d-e844-4484-978a-bc7461fcd2f3\") " pod="openshift-console/console-5557b87f9c-zzzj9"
Oct 03 13:43:44 crc kubenswrapper[4861]: I1003 13:43:44.835402 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2a5db69d-e844-4484-978a-bc7461fcd2f3-trusted-ca-bundle\") pod \"console-5557b87f9c-zzzj9\" (UID: \"2a5db69d-e844-4484-978a-bc7461fcd2f3\") " pod="openshift-console/console-5557b87f9c-zzzj9"
Oct 03 13:43:44 crc kubenswrapper[4861]: I1003 13:43:44.835440 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/2a5db69d-e844-4484-978a-bc7461fcd2f3-console-serving-cert\") pod \"console-5557b87f9c-zzzj9\" (UID: \"2a5db69d-e844-4484-978a-bc7461fcd2f3\") " pod="openshift-console/console-5557b87f9c-zzzj9"
Oct 03 13:43:44 crc kubenswrapper[4861]: I1003 13:43:44.835519 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/2a5db69d-e844-4484-978a-bc7461fcd2f3-oauth-serving-cert\") pod \"console-5557b87f9c-zzzj9\" (UID: \"2a5db69d-e844-4484-978a-bc7461fcd2f3\") " pod="openshift-console/console-5557b87f9c-zzzj9"
Oct 03 13:43:44 crc kubenswrapper[4861]: I1003 13:43:44.835653 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/2a5db69d-e844-4484-978a-bc7461fcd2f3-service-ca\") pod \"console-5557b87f9c-zzzj9\" (UID: \"2a5db69d-e844-4484-978a-bc7461fcd2f3\") " pod="openshift-console/console-5557b87f9c-zzzj9"
Oct 03 13:43:44 crc kubenswrapper[4861]: I1003 13:43:44.835685 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/2a5db69d-e844-4484-978a-bc7461fcd2f3-console-config\") pod \"console-5557b87f9c-zzzj9\" (UID: \"2a5db69d-e844-4484-978a-bc7461fcd2f3\") " pod="openshift-console/console-5557b87f9c-zzzj9"
Oct 03 13:43:44 crc kubenswrapper[4861]: I1003 13:43:44.939189 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/2a5db69d-e844-4484-978a-bc7461fcd2f3-console-oauth-config\") pod \"console-5557b87f9c-zzzj9\" (UID: \"2a5db69d-e844-4484-978a-bc7461fcd2f3\") " pod="openshift-console/console-5557b87f9c-zzzj9"
Oct 03 13:43:44 crc kubenswrapper[4861]: I1003 13:43:44.939263 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vh86d\" (UniqueName: \"kubernetes.io/projected/2a5db69d-e844-4484-978a-bc7461fcd2f3-kube-api-access-vh86d\") pod \"console-5557b87f9c-zzzj9\" (UID: \"2a5db69d-e844-4484-978a-bc7461fcd2f3\") " pod="openshift-console/console-5557b87f9c-zzzj9"
Oct 03 13:43:44 crc kubenswrapper[4861]: I1003 13:43:44.939287 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2a5db69d-e844-4484-978a-bc7461fcd2f3-trusted-ca-bundle\") pod \"console-5557b87f9c-zzzj9\" (UID: \"2a5db69d-e844-4484-978a-bc7461fcd2f3\") " pod="openshift-console/console-5557b87f9c-zzzj9"
Oct 03 13:43:44 crc kubenswrapper[4861]: I1003 13:43:44.939305 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/2a5db69d-e844-4484-978a-bc7461fcd2f3-console-serving-cert\") pod \"console-5557b87f9c-zzzj9\" (UID: \"2a5db69d-e844-4484-978a-bc7461fcd2f3\") " pod="openshift-console/console-5557b87f9c-zzzj9"
Oct 03 13:43:44 crc kubenswrapper[4861]: I1003 13:43:44.939362 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/2a5db69d-e844-4484-978a-bc7461fcd2f3-oauth-serving-cert\") pod \"console-5557b87f9c-zzzj9\" (UID: \"2a5db69d-e844-4484-978a-bc7461fcd2f3\") " pod="openshift-console/console-5557b87f9c-zzzj9"
Oct 03 13:43:44 crc kubenswrapper[4861]: I1003 13:43:44.939392 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/2a5db69d-e844-4484-978a-bc7461fcd2f3-service-ca\") pod \"console-5557b87f9c-zzzj9\" (UID: \"2a5db69d-e844-4484-978a-bc7461fcd2f3\") " pod="openshift-console/console-5557b87f9c-zzzj9"
Oct 03 13:43:44 crc kubenswrapper[4861]: I1003 13:43:44.939430 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/2a5db69d-e844-4484-978a-bc7461fcd2f3-console-config\") pod \"console-5557b87f9c-zzzj9\" (UID: \"2a5db69d-e844-4484-978a-bc7461fcd2f3\") " pod="openshift-console/console-5557b87f9c-zzzj9"
Oct 03 13:43:44 crc kubenswrapper[4861]: I1003 13:43:44.940593 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/2a5db69d-e844-4484-978a-bc7461fcd2f3-service-ca\") pod \"console-5557b87f9c-zzzj9\" (UID: \"2a5db69d-e844-4484-978a-bc7461fcd2f3\") " pod="openshift-console/console-5557b87f9c-zzzj9"
\"kubernetes.io/configmap/2a5db69d-e844-4484-978a-bc7461fcd2f3-trusted-ca-bundle\") pod \"console-5557b87f9c-zzzj9\" (UID: \"2a5db69d-e844-4484-978a-bc7461fcd2f3\") " pod="openshift-console/console-5557b87f9c-zzzj9" Oct 03 13:43:44 crc kubenswrapper[4861]: I1003 13:43:44.940693 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/2a5db69d-e844-4484-978a-bc7461fcd2f3-console-config\") pod \"console-5557b87f9c-zzzj9\" (UID: \"2a5db69d-e844-4484-978a-bc7461fcd2f3\") " pod="openshift-console/console-5557b87f9c-zzzj9" Oct 03 13:43:44 crc kubenswrapper[4861]: I1003 13:43:44.941598 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/2a5db69d-e844-4484-978a-bc7461fcd2f3-oauth-serving-cert\") pod \"console-5557b87f9c-zzzj9\" (UID: \"2a5db69d-e844-4484-978a-bc7461fcd2f3\") " pod="openshift-console/console-5557b87f9c-zzzj9" Oct 03 13:43:44 crc kubenswrapper[4861]: I1003 13:43:44.958607 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/2a5db69d-e844-4484-978a-bc7461fcd2f3-console-serving-cert\") pod \"console-5557b87f9c-zzzj9\" (UID: \"2a5db69d-e844-4484-978a-bc7461fcd2f3\") " pod="openshift-console/console-5557b87f9c-zzzj9" Oct 03 13:43:44 crc kubenswrapper[4861]: I1003 13:43:44.959156 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/2a5db69d-e844-4484-978a-bc7461fcd2f3-console-oauth-config\") pod \"console-5557b87f9c-zzzj9\" (UID: \"2a5db69d-e844-4484-978a-bc7461fcd2f3\") " pod="openshift-console/console-5557b87f9c-zzzj9" Oct 03 13:43:44 crc kubenswrapper[4861]: I1003 13:43:44.987658 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vh86d\" (UniqueName: \"kubernetes.io/projected/2a5db69d-e844-4484-978a-bc7461fcd2f3-kube-api-access-vh86d\") pod \"console-5557b87f9c-zzzj9\" (UID: \"2a5db69d-e844-4484-978a-bc7461fcd2f3\") " pod="openshift-console/console-5557b87f9c-zzzj9" Oct 03 13:43:45 crc kubenswrapper[4861]: I1003 13:43:45.040943 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/cecb92af-dba6-4e7b-825c-97d7fcd5cc5d-tls-key-pair\") pod \"nmstate-webhook-6cdbc54649-v6q9r\" (UID: \"cecb92af-dba6-4e7b-825c-97d7fcd5cc5d\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-v6q9r" Oct 03 13:43:45 crc kubenswrapper[4861]: I1003 13:43:45.045340 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/cecb92af-dba6-4e7b-825c-97d7fcd5cc5d-tls-key-pair\") pod \"nmstate-webhook-6cdbc54649-v6q9r\" (UID: \"cecb92af-dba6-4e7b-825c-97d7fcd5cc5d\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-v6q9r" Oct 03 13:43:45 crc kubenswrapper[4861]: I1003 13:43:45.130640 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-5557b87f9c-zzzj9" Oct 03 13:43:45 crc kubenswrapper[4861]: I1003 13:43:45.192573 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-rb8d2"] Oct 03 13:43:45 crc kubenswrapper[4861]: I1003 13:43:45.244216 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-v6q9r" Oct 03 13:43:45 crc kubenswrapper[4861]: I1003 13:43:45.255811 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-fdff9cb8d-b4mxl"] Oct 03 13:43:45 crc kubenswrapper[4861]: I1003 13:43:45.322876 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-6b874cbd85-jmr9f"] Oct 03 13:43:45 crc kubenswrapper[4861]: W1003 13:43:45.338462 4861 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod767a82d2_0bea_436b_b63a_c5bbf0de86b8.slice/crio-fe4693b1b12c0f46b30994f440d2399cdd4dc00bff70cb5ea8d3cc755fed3388 WatchSource:0}: Error finding container fe4693b1b12c0f46b30994f440d2399cdd4dc00bff70cb5ea8d3cc755fed3388: Status 404 returned error can't find the container with id fe4693b1b12c0f46b30994f440d2399cdd4dc00bff70cb5ea8d3cc755fed3388 Oct 03 13:43:45 crc kubenswrapper[4861]: I1003 13:43:45.425089 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-b4mxl" event={"ID":"74a61ffa-6414-4072-952f-d3a9e5df2cad","Type":"ContainerStarted","Data":"b9add04d10e8b1ade519bb5a0562ffdf5f6c850eb297223a040d4d41e1dd3f03"} Oct 03 13:43:45 crc kubenswrapper[4861]: I1003 13:43:45.426428 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-jmr9f" event={"ID":"767a82d2-0bea-436b-b63a-c5bbf0de86b8","Type":"ContainerStarted","Data":"fe4693b1b12c0f46b30994f440d2399cdd4dc00bff70cb5ea8d3cc755fed3388"} Oct 03 13:43:45 crc kubenswrapper[4861]: I1003 13:43:45.428364 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-thvxb" event={"ID":"c1ec679d-6458-489b-bdb3-6c6ec465d695","Type":"ContainerStarted","Data":"f430d69320bdeeb386ff373f2fa4d05ae16d2cb2a05e6c40c8b5fadc252804a5"} Oct 03 13:43:45 crc kubenswrapper[4861]: I1003 13:43:45.560117 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-5557b87f9c-zzzj9"] Oct 03 13:43:45 crc kubenswrapper[4861]: W1003 13:43:45.567099 4861 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2a5db69d_e844_4484_978a_bc7461fcd2f3.slice/crio-7eb7195d961d22a42112141c1f57062f901ac38ad5f8b1a1e71d0c72e4070231 WatchSource:0}: Error finding container 7eb7195d961d22a42112141c1f57062f901ac38ad5f8b1a1e71d0c72e4070231: Status 404 returned error can't find the container with id 7eb7195d961d22a42112141c1f57062f901ac38ad5f8b1a1e71d0c72e4070231 Oct 03 13:43:45 crc kubenswrapper[4861]: I1003 13:43:45.664015 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-6cdbc54649-v6q9r"] Oct 03 13:43:45 crc kubenswrapper[4861]: W1003 13:43:45.674146 4861 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcecb92af_dba6_4e7b_825c_97d7fcd5cc5d.slice/crio-ec529b6b11578a9f4eacad07f09459c4111bf874b7b8f8cfb989f926f0b2773b WatchSource:0}: Error finding container ec529b6b11578a9f4eacad07f09459c4111bf874b7b8f8cfb989f926f0b2773b: Status 404 returned error can't find the container with id ec529b6b11578a9f4eacad07f09459c4111bf874b7b8f8cfb989f926f0b2773b Oct 03 13:43:46 crc kubenswrapper[4861]: I1003 13:43:46.434316 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-v6q9r" 
event={"ID":"cecb92af-dba6-4e7b-825c-97d7fcd5cc5d","Type":"ContainerStarted","Data":"ec529b6b11578a9f4eacad07f09459c4111bf874b7b8f8cfb989f926f0b2773b"} Oct 03 13:43:46 crc kubenswrapper[4861]: I1003 13:43:46.436823 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-5557b87f9c-zzzj9" event={"ID":"2a5db69d-e844-4484-978a-bc7461fcd2f3","Type":"ContainerStarted","Data":"6a6920fc1748c96cc4d4feaeb802c25d4b5140dce6a0256e101442727a33518d"} Oct 03 13:43:46 crc kubenswrapper[4861]: I1003 13:43:46.436860 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-5557b87f9c-zzzj9" event={"ID":"2a5db69d-e844-4484-978a-bc7461fcd2f3","Type":"ContainerStarted","Data":"7eb7195d961d22a42112141c1f57062f901ac38ad5f8b1a1e71d0c72e4070231"} Oct 03 13:43:46 crc kubenswrapper[4861]: I1003 13:43:46.436946 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-rb8d2" podUID="496da619-6355-481d-8414-ea68f5e7685a" containerName="registry-server" containerID="cri-o://141a6e36d974bc9c702dad707fe17a429155abc74d8c7fa5c4a31ad794d7efea" gracePeriod=2 Oct 03 13:43:46 crc kubenswrapper[4861]: I1003 13:43:46.463262 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-5557b87f9c-zzzj9" podStartSLOduration=2.463210312 podStartE2EDuration="2.463210312s" podCreationTimestamp="2025-10-03 13:43:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:43:46.461756643 +0000 UTC m=+740.459741710" watchObservedRunningTime="2025-10-03 13:43:46.463210312 +0000 UTC m=+740.461195359" Oct 03 13:43:47 crc kubenswrapper[4861]: I1003 13:43:47.445291 4861 generic.go:334] "Generic (PLEG): container finished" podID="496da619-6355-481d-8414-ea68f5e7685a" containerID="141a6e36d974bc9c702dad707fe17a429155abc74d8c7fa5c4a31ad794d7efea" exitCode=0 Oct 03 13:43:47 crc kubenswrapper[4861]: I1003 13:43:47.446321 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rb8d2" event={"ID":"496da619-6355-481d-8414-ea68f5e7685a","Type":"ContainerDied","Data":"141a6e36d974bc9c702dad707fe17a429155abc74d8c7fa5c4a31ad794d7efea"} Oct 03 13:43:47 crc kubenswrapper[4861]: I1003 13:43:47.778664 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-rb8d2" Oct 03 13:43:47 crc kubenswrapper[4861]: I1003 13:43:47.882576 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/496da619-6355-481d-8414-ea68f5e7685a-utilities\") pod \"496da619-6355-481d-8414-ea68f5e7685a\" (UID: \"496da619-6355-481d-8414-ea68f5e7685a\") " Oct 03 13:43:47 crc kubenswrapper[4861]: I1003 13:43:47.882716 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f9jc7\" (UniqueName: \"kubernetes.io/projected/496da619-6355-481d-8414-ea68f5e7685a-kube-api-access-f9jc7\") pod \"496da619-6355-481d-8414-ea68f5e7685a\" (UID: \"496da619-6355-481d-8414-ea68f5e7685a\") " Oct 03 13:43:47 crc kubenswrapper[4861]: I1003 13:43:47.882766 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/496da619-6355-481d-8414-ea68f5e7685a-catalog-content\") pod \"496da619-6355-481d-8414-ea68f5e7685a\" (UID: \"496da619-6355-481d-8414-ea68f5e7685a\") " Oct 03 13:43:47 crc kubenswrapper[4861]: I1003 13:43:47.884154 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/496da619-6355-481d-8414-ea68f5e7685a-utilities" (OuterVolumeSpecName: "utilities") pod "496da619-6355-481d-8414-ea68f5e7685a" (UID: "496da619-6355-481d-8414-ea68f5e7685a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:43:47 crc kubenswrapper[4861]: I1003 13:43:47.889768 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496da619-6355-481d-8414-ea68f5e7685a-kube-api-access-f9jc7" (OuterVolumeSpecName: "kube-api-access-f9jc7") pod "496da619-6355-481d-8414-ea68f5e7685a" (UID: "496da619-6355-481d-8414-ea68f5e7685a"). InnerVolumeSpecName "kube-api-access-f9jc7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:43:47 crc kubenswrapper[4861]: I1003 13:43:47.984545 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f9jc7\" (UniqueName: \"kubernetes.io/projected/496da619-6355-481d-8414-ea68f5e7685a-kube-api-access-f9jc7\") on node \"crc\" DevicePath \"\"" Oct 03 13:43:47 crc kubenswrapper[4861]: I1003 13:43:47.984855 4861 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/496da619-6355-481d-8414-ea68f5e7685a-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 13:43:48 crc kubenswrapper[4861]: I1003 13:43:48.451594 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-v6q9r" event={"ID":"cecb92af-dba6-4e7b-825c-97d7fcd5cc5d","Type":"ContainerStarted","Data":"6c6921dff04fa5eb2277b59555fe264ae69e5f07bafecefdd37411548d3e9e49"} Oct 03 13:43:48 crc kubenswrapper[4861]: I1003 13:43:48.452209 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-v6q9r" Oct 03 13:43:48 crc kubenswrapper[4861]: I1003 13:43:48.459503 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-thvxb" event={"ID":"c1ec679d-6458-489b-bdb3-6c6ec465d695","Type":"ContainerStarted","Data":"48f1aacf6cedf24ffa1a73f823d06dffdb2efbcd1d2ccb38225c8e0cca34136a"} Oct 03 13:43:48 crc kubenswrapper[4861]: I1003 13:43:48.460010 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-thvxb" Oct 03 13:43:48 crc kubenswrapper[4861]: I1003 13:43:48.462566 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-b4mxl" event={"ID":"74a61ffa-6414-4072-952f-d3a9e5df2cad","Type":"ContainerStarted","Data":"7acdd86f2b84b5b404f2c47a1944f2f016f7f6cb6ba29d07c1c0d54eeabd7d94"} Oct 03 13:43:48 crc kubenswrapper[4861]: I1003 13:43:48.478192 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rb8d2" event={"ID":"496da619-6355-481d-8414-ea68f5e7685a","Type":"ContainerDied","Data":"af06e049c7917fe9b68fa984e0f18e5819539d7c1d7ffc5395fa4311caac4a8a"} Oct 03 13:43:48 crc kubenswrapper[4861]: I1003 13:43:48.478288 4861 scope.go:117] "RemoveContainer" containerID="141a6e36d974bc9c702dad707fe17a429155abc74d8c7fa5c4a31ad794d7efea" Oct 03 13:43:48 crc kubenswrapper[4861]: I1003 13:43:48.478398 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-rb8d2" Oct 03 13:43:48 crc kubenswrapper[4861]: I1003 13:43:48.495680 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-v6q9r" podStartSLOduration=2.294496362 podStartE2EDuration="4.495655158s" podCreationTimestamp="2025-10-03 13:43:44 +0000 UTC" firstStartedPulling="2025-10-03 13:43:45.676734279 +0000 UTC m=+739.674719336" lastFinishedPulling="2025-10-03 13:43:47.877893085 +0000 UTC m=+741.875878132" observedRunningTime="2025-10-03 13:43:48.476748695 +0000 UTC m=+742.474733742" watchObservedRunningTime="2025-10-03 13:43:48.495655158 +0000 UTC m=+742.493640205" Oct 03 13:43:48 crc kubenswrapper[4861]: I1003 13:43:48.496966 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-handler-thvxb" podStartSLOduration=1.3643971289999999 podStartE2EDuration="4.496959173s" podCreationTimestamp="2025-10-03 13:43:44 +0000 UTC" firstStartedPulling="2025-10-03 13:43:44.730561598 +0000 UTC m=+738.728546645" lastFinishedPulling="2025-10-03 13:43:47.863123652 +0000 UTC m=+741.861108689" observedRunningTime="2025-10-03 13:43:48.494327773 +0000 UTC m=+742.492312830" watchObservedRunningTime="2025-10-03 13:43:48.496959173 +0000 UTC m=+742.494944220" Oct 03 13:43:50 crc kubenswrapper[4861]: I1003 13:43:50.068565 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/496da619-6355-481d-8414-ea68f5e7685a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "496da619-6355-481d-8414-ea68f5e7685a" (UID: "496da619-6355-481d-8414-ea68f5e7685a"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:43:50 crc kubenswrapper[4861]: I1003 13:43:50.083487 4861 scope.go:117] "RemoveContainer" containerID="dcfd744d4f5b0a7ff0aff43aee854aaff541908b4d7298bae8663d836bcc7a45" Oct 03 13:43:50 crc kubenswrapper[4861]: I1003 13:43:50.100534 4861 scope.go:117] "RemoveContainer" containerID="c0d243c4a8fbea7d64063096e74b91a1f6b0553650ca51ca68d9468425e7a393" Oct 03 13:43:50 crc kubenswrapper[4861]: I1003 13:43:50.101659 4861 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/496da619-6355-481d-8414-ea68f5e7685a-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 13:43:50 crc kubenswrapper[4861]: I1003 13:43:50.322535 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-rb8d2"] Oct 03 13:43:50 crc kubenswrapper[4861]: I1003 13:43:50.325309 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-rb8d2"] Oct 03 13:43:50 crc kubenswrapper[4861]: I1003 13:43:50.490490 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-jmr9f" event={"ID":"767a82d2-0bea-436b-b63a-c5bbf0de86b8","Type":"ContainerStarted","Data":"a8bd1fcadf5249f8c159a25a45a3eeba8c68097ef87da88fb35f405d99c655cd"} Oct 03 13:43:50 crc kubenswrapper[4861]: I1003 13:43:50.521084 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-jmr9f" podStartSLOduration=1.700895764 podStartE2EDuration="6.521063919s" podCreationTimestamp="2025-10-03 13:43:44 +0000 UTC" firstStartedPulling="2025-10-03 13:43:45.351770683 +0000 UTC m=+739.349755730" lastFinishedPulling="2025-10-03 13:43:50.171938838 +0000 UTC 
m=+744.169923885" observedRunningTime="2025-10-03 13:43:50.507963629 +0000 UTC m=+744.505948696" watchObservedRunningTime="2025-10-03 13:43:50.521063919 +0000 UTC m=+744.519048966" Oct 03 13:43:50 crc kubenswrapper[4861]: I1003 13:43:50.695094 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496da619-6355-481d-8414-ea68f5e7685a" path="/var/lib/kubelet/pods/496da619-6355-481d-8414-ea68f5e7685a/volumes" Oct 03 13:43:51 crc kubenswrapper[4861]: I1003 13:43:51.502041 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-b4mxl" event={"ID":"74a61ffa-6414-4072-952f-d3a9e5df2cad","Type":"ContainerStarted","Data":"477aa501fbd303c51f053d6bf41f36c0fc428ebe70684a1b7d5d93a13a9be37a"} Oct 03 13:43:51 crc kubenswrapper[4861]: I1003 13:43:51.521148 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-b4mxl" podStartSLOduration=1.558509607 podStartE2EDuration="7.521129427s" podCreationTimestamp="2025-10-03 13:43:44 +0000 UTC" firstStartedPulling="2025-10-03 13:43:45.281664754 +0000 UTC m=+739.279649801" lastFinishedPulling="2025-10-03 13:43:51.244284574 +0000 UTC m=+745.242269621" observedRunningTime="2025-10-03 13:43:51.519682828 +0000 UTC m=+745.517667895" watchObservedRunningTime="2025-10-03 13:43:51.521129427 +0000 UTC m=+745.519114474" Oct 03 13:43:54 crc kubenswrapper[4861]: I1003 13:43:54.694950 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-thvxb" Oct 03 13:43:55 crc kubenswrapper[4861]: I1003 13:43:55.131331 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-5557b87f9c-zzzj9" Oct 03 13:43:55 crc kubenswrapper[4861]: I1003 13:43:55.131392 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-5557b87f9c-zzzj9" Oct 03 13:43:55 crc kubenswrapper[4861]: I1003 13:43:55.138086 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-5557b87f9c-zzzj9" Oct 03 13:43:55 crc kubenswrapper[4861]: I1003 13:43:55.527633 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-5557b87f9c-zzzj9" Oct 03 13:43:55 crc kubenswrapper[4861]: I1003 13:43:55.576244 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-v6tz9"] Oct 03 13:44:00 crc kubenswrapper[4861]: I1003 13:44:00.145195 4861 patch_prober.go:28] interesting pod/machine-config-daemon-t9slw container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 13:44:00 crc kubenswrapper[4861]: I1003 13:44:00.145625 4861 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 13:44:00 crc kubenswrapper[4861]: I1003 13:44:00.145673 4861 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" Oct 03 13:44:00 crc kubenswrapper[4861]: I1003 13:44:00.146245 4861 kuberuntime_manager.go:1027] "Message for 
Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"d80b8d3116622b7c143b4b78ef1cc92a62fa6958e50f67859a9a0dce0c925ac5"} pod="openshift-machine-config-operator/machine-config-daemon-t9slw" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 03 13:44:00 crc kubenswrapper[4861]: I1003 13:44:00.146315 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" containerName="machine-config-daemon" containerID="cri-o://d80b8d3116622b7c143b4b78ef1cc92a62fa6958e50f67859a9a0dce0c925ac5" gracePeriod=600 Oct 03 13:44:00 crc kubenswrapper[4861]: I1003 13:44:00.549453 4861 generic.go:334] "Generic (PLEG): container finished" podID="d8335d3f-417e-4114-b306-a3d8f6c31348" containerID="d80b8d3116622b7c143b4b78ef1cc92a62fa6958e50f67859a9a0dce0c925ac5" exitCode=0 Oct 03 13:44:00 crc kubenswrapper[4861]: I1003 13:44:00.549610 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" event={"ID":"d8335d3f-417e-4114-b306-a3d8f6c31348","Type":"ContainerDied","Data":"d80b8d3116622b7c143b4b78ef1cc92a62fa6958e50f67859a9a0dce0c925ac5"} Oct 03 13:44:00 crc kubenswrapper[4861]: I1003 13:44:00.549833 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" event={"ID":"d8335d3f-417e-4114-b306-a3d8f6c31348","Type":"ContainerStarted","Data":"d8c9692c5afc364d3518624df673f7e65a1fe92e46a015f8a19e45916a42b14c"} Oct 03 13:44:00 crc kubenswrapper[4861]: I1003 13:44:00.549914 4861 scope.go:117] "RemoveContainer" containerID="e480ef4df69d33dab6234057237660f52eab1528687ed0bfdbfbb9854b248c93" Oct 03 13:44:05 crc kubenswrapper[4861]: I1003 13:44:05.251980 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-v6q9r" Oct 03 13:44:18 crc kubenswrapper[4861]: I1003 13:44:18.946292 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2xk9jt"] Oct 03 13:44:18 crc kubenswrapper[4861]: E1003 13:44:18.947209 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="496da619-6355-481d-8414-ea68f5e7685a" containerName="registry-server" Oct 03 13:44:18 crc kubenswrapper[4861]: I1003 13:44:18.947224 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="496da619-6355-481d-8414-ea68f5e7685a" containerName="registry-server" Oct 03 13:44:18 crc kubenswrapper[4861]: E1003 13:44:18.947262 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="496da619-6355-481d-8414-ea68f5e7685a" containerName="extract-content" Oct 03 13:44:18 crc kubenswrapper[4861]: I1003 13:44:18.947271 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="496da619-6355-481d-8414-ea68f5e7685a" containerName="extract-content" Oct 03 13:44:18 crc kubenswrapper[4861]: E1003 13:44:18.947287 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="496da619-6355-481d-8414-ea68f5e7685a" containerName="extract-utilities" Oct 03 13:44:18 crc kubenswrapper[4861]: I1003 13:44:18.947295 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="496da619-6355-481d-8414-ea68f5e7685a" containerName="extract-utilities" Oct 03 13:44:18 crc kubenswrapper[4861]: I1003 13:44:18.947453 4861 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="496da619-6355-481d-8414-ea68f5e7685a" containerName="registry-server" Oct 03 13:44:18 crc kubenswrapper[4861]: I1003 13:44:18.948352 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2xk9jt" Oct 03 13:44:18 crc kubenswrapper[4861]: I1003 13:44:18.953176 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Oct 03 13:44:18 crc kubenswrapper[4861]: I1003 13:44:18.962926 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2xk9jt"] Oct 03 13:44:19 crc kubenswrapper[4861]: I1003 13:44:19.019971 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/b2677784-5297-4d5b-8558-e904b9668fa5-util\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2xk9jt\" (UID: \"b2677784-5297-4d5b-8558-e904b9668fa5\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2xk9jt" Oct 03 13:44:19 crc kubenswrapper[4861]: I1003 13:44:19.020045 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rfh89\" (UniqueName: \"kubernetes.io/projected/b2677784-5297-4d5b-8558-e904b9668fa5-kube-api-access-rfh89\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2xk9jt\" (UID: \"b2677784-5297-4d5b-8558-e904b9668fa5\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2xk9jt" Oct 03 13:44:19 crc kubenswrapper[4861]: I1003 13:44:19.020096 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b2677784-5297-4d5b-8558-e904b9668fa5-bundle\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2xk9jt\" (UID: \"b2677784-5297-4d5b-8558-e904b9668fa5\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2xk9jt" Oct 03 13:44:19 crc kubenswrapper[4861]: I1003 13:44:19.120750 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b2677784-5297-4d5b-8558-e904b9668fa5-bundle\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2xk9jt\" (UID: \"b2677784-5297-4d5b-8558-e904b9668fa5\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2xk9jt" Oct 03 13:44:19 crc kubenswrapper[4861]: I1003 13:44:19.120837 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/b2677784-5297-4d5b-8558-e904b9668fa5-util\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2xk9jt\" (UID: \"b2677784-5297-4d5b-8558-e904b9668fa5\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2xk9jt" Oct 03 13:44:19 crc kubenswrapper[4861]: I1003 13:44:19.120870 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rfh89\" (UniqueName: \"kubernetes.io/projected/b2677784-5297-4d5b-8558-e904b9668fa5-kube-api-access-rfh89\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2xk9jt\" (UID: \"b2677784-5297-4d5b-8558-e904b9668fa5\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2xk9jt" Oct 03 13:44:19 
Oct 03 13:44:19 crc kubenswrapper[4861]: I1003 13:44:19.121363 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b2677784-5297-4d5b-8558-e904b9668fa5-bundle\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2xk9jt\" (UID: \"b2677784-5297-4d5b-8558-e904b9668fa5\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2xk9jt"
Oct 03 13:44:19 crc kubenswrapper[4861]: I1003 13:44:19.121484 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/b2677784-5297-4d5b-8558-e904b9668fa5-util\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2xk9jt\" (UID: \"b2677784-5297-4d5b-8558-e904b9668fa5\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2xk9jt"
Oct 03 13:44:19 crc kubenswrapper[4861]: I1003 13:44:19.139171 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rfh89\" (UniqueName: \"kubernetes.io/projected/b2677784-5297-4d5b-8558-e904b9668fa5-kube-api-access-rfh89\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2xk9jt\" (UID: \"b2677784-5297-4d5b-8558-e904b9668fa5\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2xk9jt"
Oct 03 13:44:19 crc kubenswrapper[4861]: I1003 13:44:19.266041 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2xk9jt"
Oct 03 13:44:19 crc kubenswrapper[4861]: I1003 13:44:19.661127 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2xk9jt"]
Oct 03 13:44:19 crc kubenswrapper[4861]: W1003 13:44:19.669219 4861 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb2677784_5297_4d5b_8558_e904b9668fa5.slice/crio-e0a89155379e571911a39791a856ff0cfb2e5c153baf4e99ce2adaa2e64eb61c WatchSource:0}: Error finding container e0a89155379e571911a39791a856ff0cfb2e5c153baf4e99ce2adaa2e64eb61c: Status 404 returned error can't find the container with id e0a89155379e571911a39791a856ff0cfb2e5c153baf4e99ce2adaa2e64eb61c
Oct 03 13:44:20 crc kubenswrapper[4861]: I1003 13:44:20.630531 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-f9d7485db-v6tz9" podUID="35c87388-97eb-44ff-91d6-6e9b9cfaa6a1" containerName="console" containerID="cri-o://80706c4f0bc3f6875cd066aca77b1148571763260f4cfd1558f45b14ac3af6b2" gracePeriod=15
Oct 03 13:44:20 crc kubenswrapper[4861]: I1003 13:44:20.664217 4861 generic.go:334] "Generic (PLEG): container finished" podID="b2677784-5297-4d5b-8558-e904b9668fa5" containerID="e18c8d4c86fd3ab6801ad2f741894a1cdda483455f5e166e10c7c8eb34677601" exitCode=0
Oct 03 13:44:20 crc kubenswrapper[4861]: I1003 13:44:20.664304 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2xk9jt" event={"ID":"b2677784-5297-4d5b-8558-e904b9668fa5","Type":"ContainerDied","Data":"e18c8d4c86fd3ab6801ad2f741894a1cdda483455f5e166e10c7c8eb34677601"}
Oct 03 13:44:20 crc kubenswrapper[4861]: I1003 13:44:20.664355 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2xk9jt" event={"ID":"b2677784-5297-4d5b-8558-e904b9668fa5","Type":"ContainerStarted","Data":"e0a89155379e571911a39791a856ff0cfb2e5c153baf4e99ce2adaa2e64eb61c"}
Oct 03 13:44:20 crc kubenswrapper[4861]: I1003 13:44:20.989342 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-v6tz9_35c87388-97eb-44ff-91d6-6e9b9cfaa6a1/console/0.log"
Oct 03 13:44:20 crc kubenswrapper[4861]: I1003 13:44:20.989608 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-v6tz9"
Oct 03 13:44:21 crc kubenswrapper[4861]: I1003 13:44:21.050296 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/35c87388-97eb-44ff-91d6-6e9b9cfaa6a1-console-oauth-config\") pod \"35c87388-97eb-44ff-91d6-6e9b9cfaa6a1\" (UID: \"35c87388-97eb-44ff-91d6-6e9b9cfaa6a1\") "
Oct 03 13:44:21 crc kubenswrapper[4861]: I1003 13:44:21.050337 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/35c87388-97eb-44ff-91d6-6e9b9cfaa6a1-oauth-serving-cert\") pod \"35c87388-97eb-44ff-91d6-6e9b9cfaa6a1\" (UID: \"35c87388-97eb-44ff-91d6-6e9b9cfaa6a1\") "
Oct 03 13:44:21 crc kubenswrapper[4861]: I1003 13:44:21.050367 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/35c87388-97eb-44ff-91d6-6e9b9cfaa6a1-console-config\") pod \"35c87388-97eb-44ff-91d6-6e9b9cfaa6a1\" (UID: \"35c87388-97eb-44ff-91d6-6e9b9cfaa6a1\") "
Oct 03 13:44:21 crc kubenswrapper[4861]: I1003 13:44:21.050385 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-49pjb\" (UniqueName: \"kubernetes.io/projected/35c87388-97eb-44ff-91d6-6e9b9cfaa6a1-kube-api-access-49pjb\") pod \"35c87388-97eb-44ff-91d6-6e9b9cfaa6a1\" (UID: \"35c87388-97eb-44ff-91d6-6e9b9cfaa6a1\") "
Oct 03 13:44:21 crc kubenswrapper[4861]: I1003 13:44:21.050436 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/35c87388-97eb-44ff-91d6-6e9b9cfaa6a1-trusted-ca-bundle\") pod \"35c87388-97eb-44ff-91d6-6e9b9cfaa6a1\" (UID: \"35c87388-97eb-44ff-91d6-6e9b9cfaa6a1\") "
Oct 03 13:44:21 crc kubenswrapper[4861]: I1003 13:44:21.050469 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/35c87388-97eb-44ff-91d6-6e9b9cfaa6a1-console-serving-cert\") pod \"35c87388-97eb-44ff-91d6-6e9b9cfaa6a1\" (UID: \"35c87388-97eb-44ff-91d6-6e9b9cfaa6a1\") "
Oct 03 13:44:21 crc kubenswrapper[4861]: I1003 13:44:21.050488 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/35c87388-97eb-44ff-91d6-6e9b9cfaa6a1-service-ca\") pod \"35c87388-97eb-44ff-91d6-6e9b9cfaa6a1\" (UID: \"35c87388-97eb-44ff-91d6-6e9b9cfaa6a1\") "
Oct 03 13:44:21 crc kubenswrapper[4861]: I1003 13:44:21.051438 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/35c87388-97eb-44ff-91d6-6e9b9cfaa6a1-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "35c87388-97eb-44ff-91d6-6e9b9cfaa6a1" (UID: "35c87388-97eb-44ff-91d6-6e9b9cfaa6a1"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 13:44:21 crc kubenswrapper[4861]: I1003 13:44:21.051521 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/35c87388-97eb-44ff-91d6-6e9b9cfaa6a1-console-config" (OuterVolumeSpecName: "console-config") pod "35c87388-97eb-44ff-91d6-6e9b9cfaa6a1" (UID: "35c87388-97eb-44ff-91d6-6e9b9cfaa6a1"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 13:44:21 crc kubenswrapper[4861]: I1003 13:44:21.051849 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/35c87388-97eb-44ff-91d6-6e9b9cfaa6a1-service-ca" (OuterVolumeSpecName: "service-ca") pod "35c87388-97eb-44ff-91d6-6e9b9cfaa6a1" (UID: "35c87388-97eb-44ff-91d6-6e9b9cfaa6a1"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 13:44:21 crc kubenswrapper[4861]: I1003 13:44:21.052062 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/35c87388-97eb-44ff-91d6-6e9b9cfaa6a1-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "35c87388-97eb-44ff-91d6-6e9b9cfaa6a1" (UID: "35c87388-97eb-44ff-91d6-6e9b9cfaa6a1"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 13:44:21 crc kubenswrapper[4861]: I1003 13:44:21.056553 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/35c87388-97eb-44ff-91d6-6e9b9cfaa6a1-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "35c87388-97eb-44ff-91d6-6e9b9cfaa6a1" (UID: "35c87388-97eb-44ff-91d6-6e9b9cfaa6a1"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 13:44:21 crc kubenswrapper[4861]: I1003 13:44:21.057197 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/35c87388-97eb-44ff-91d6-6e9b9cfaa6a1-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "35c87388-97eb-44ff-91d6-6e9b9cfaa6a1" (UID: "35c87388-97eb-44ff-91d6-6e9b9cfaa6a1"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 13:44:21 crc kubenswrapper[4861]: I1003 13:44:21.063501 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/35c87388-97eb-44ff-91d6-6e9b9cfaa6a1-kube-api-access-49pjb" (OuterVolumeSpecName: "kube-api-access-49pjb") pod "35c87388-97eb-44ff-91d6-6e9b9cfaa6a1" (UID: "35c87388-97eb-44ff-91d6-6e9b9cfaa6a1"). InnerVolumeSpecName "kube-api-access-49pjb". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 13:44:21 crc kubenswrapper[4861]: I1003 13:44:21.152925 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-49pjb\" (UniqueName: \"kubernetes.io/projected/35c87388-97eb-44ff-91d6-6e9b9cfaa6a1-kube-api-access-49pjb\") on node \"crc\" DevicePath \"\""
Oct 03 13:44:21 crc kubenswrapper[4861]: I1003 13:44:21.152977 4861 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/35c87388-97eb-44ff-91d6-6e9b9cfaa6a1-trusted-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 03 13:44:21 crc kubenswrapper[4861]: I1003 13:44:21.152993 4861 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/35c87388-97eb-44ff-91d6-6e9b9cfaa6a1-console-serving-cert\") on node \"crc\" DevicePath \"\""
Oct 03 13:44:21 crc kubenswrapper[4861]: I1003 13:44:21.153004 4861 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/35c87388-97eb-44ff-91d6-6e9b9cfaa6a1-service-ca\") on node \"crc\" DevicePath \"\""
Oct 03 13:44:21 crc kubenswrapper[4861]: I1003 13:44:21.153015 4861 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/35c87388-97eb-44ff-91d6-6e9b9cfaa6a1-console-oauth-config\") on node \"crc\" DevicePath \"\""
Oct 03 13:44:21 crc kubenswrapper[4861]: I1003 13:44:21.153026 4861 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/35c87388-97eb-44ff-91d6-6e9b9cfaa6a1-oauth-serving-cert\") on node \"crc\" DevicePath \"\""
Oct 03 13:44:21 crc kubenswrapper[4861]: I1003 13:44:21.153039 4861 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/35c87388-97eb-44ff-91d6-6e9b9cfaa6a1-console-config\") on node \"crc\" DevicePath \"\""
Oct 03 13:44:21 crc kubenswrapper[4861]: I1003 13:44:21.674547 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-v6tz9_35c87388-97eb-44ff-91d6-6e9b9cfaa6a1/console/0.log"
Oct 03 13:44:21 crc kubenswrapper[4861]: I1003 13:44:21.674601 4861 generic.go:334] "Generic (PLEG): container finished" podID="35c87388-97eb-44ff-91d6-6e9b9cfaa6a1" containerID="80706c4f0bc3f6875cd066aca77b1148571763260f4cfd1558f45b14ac3af6b2" exitCode=2
Oct 03 13:44:21 crc kubenswrapper[4861]: I1003 13:44:21.674633 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-v6tz9" event={"ID":"35c87388-97eb-44ff-91d6-6e9b9cfaa6a1","Type":"ContainerDied","Data":"80706c4f0bc3f6875cd066aca77b1148571763260f4cfd1558f45b14ac3af6b2"}
Oct 03 13:44:21 crc kubenswrapper[4861]: I1003 13:44:21.674660 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-v6tz9" event={"ID":"35c87388-97eb-44ff-91d6-6e9b9cfaa6a1","Type":"ContainerDied","Data":"a3d0110a3237c4ce9031546a1c0bf63697f314adbf9d8190a434dde58f7d2eca"}
Oct 03 13:44:21 crc kubenswrapper[4861]: I1003 13:44:21.674681 4861 scope.go:117] "RemoveContainer" containerID="80706c4f0bc3f6875cd066aca77b1148571763260f4cfd1558f45b14ac3af6b2"
Oct 03 13:44:21 crc kubenswrapper[4861]: I1003 13:44:21.674791 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-v6tz9"
Oct 03 13:44:21 crc kubenswrapper[4861]: I1003 13:44:21.712836 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-v6tz9"]
Oct 03 13:44:21 crc kubenswrapper[4861]: I1003 13:44:21.718319 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-f9d7485db-v6tz9"]
Oct 03 13:44:21 crc kubenswrapper[4861]: I1003 13:44:21.726476 4861 scope.go:117] "RemoveContainer" containerID="80706c4f0bc3f6875cd066aca77b1148571763260f4cfd1558f45b14ac3af6b2"
Oct 03 13:44:21 crc kubenswrapper[4861]: E1003 13:44:21.726952 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"80706c4f0bc3f6875cd066aca77b1148571763260f4cfd1558f45b14ac3af6b2\": container with ID starting with 80706c4f0bc3f6875cd066aca77b1148571763260f4cfd1558f45b14ac3af6b2 not found: ID does not exist" containerID="80706c4f0bc3f6875cd066aca77b1148571763260f4cfd1558f45b14ac3af6b2"
Oct 03 13:44:21 crc kubenswrapper[4861]: I1003 13:44:21.726987 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"80706c4f0bc3f6875cd066aca77b1148571763260f4cfd1558f45b14ac3af6b2"} err="failed to get container status \"80706c4f0bc3f6875cd066aca77b1148571763260f4cfd1558f45b14ac3af6b2\": rpc error: code = NotFound desc = could not find container \"80706c4f0bc3f6875cd066aca77b1148571763260f4cfd1558f45b14ac3af6b2\": container with ID starting with 80706c4f0bc3f6875cd066aca77b1148571763260f4cfd1558f45b14ac3af6b2 not found: ID does not exist"
Oct 03 13:44:22 crc kubenswrapper[4861]: I1003 13:44:22.681520 4861 generic.go:334] "Generic (PLEG): container finished" podID="b2677784-5297-4d5b-8558-e904b9668fa5" containerID="a682cf7a9d277206ac53bc1581f0af565567f783481726f79d0e222494a9fab6" exitCode=0
Oct 03 13:44:22 crc kubenswrapper[4861]: I1003 13:44:22.691483 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="35c87388-97eb-44ff-91d6-6e9b9cfaa6a1" path="/var/lib/kubelet/pods/35c87388-97eb-44ff-91d6-6e9b9cfaa6a1/volumes"
Oct 03 13:44:22 crc kubenswrapper[4861]: I1003 13:44:22.692199 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2xk9jt" event={"ID":"b2677784-5297-4d5b-8558-e904b9668fa5","Type":"ContainerDied","Data":"a682cf7a9d277206ac53bc1581f0af565567f783481726f79d0e222494a9fab6"}
Oct 03 13:44:23 crc kubenswrapper[4861]: I1003 13:44:23.688759 4861 generic.go:334] "Generic (PLEG): container finished" podID="b2677784-5297-4d5b-8558-e904b9668fa5" containerID="9030e5994b5d2b8b07c3f9ed7e2c755d8b41e577042b6028fe585529452203e1" exitCode=0
Oct 03 13:44:23 crc kubenswrapper[4861]: I1003 13:44:23.688805 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2xk9jt" event={"ID":"b2677784-5297-4d5b-8558-e904b9668fa5","Type":"ContainerDied","Data":"9030e5994b5d2b8b07c3f9ed7e2c755d8b41e577042b6028fe585529452203e1"}
Oct 03 13:44:24 crc kubenswrapper[4861]: I1003 13:44:24.929431 4861 util.go:48] "No ready sandbox for pod can be found.
Need to start a new one" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2xk9jt" Oct 03 13:44:25 crc kubenswrapper[4861]: I1003 13:44:25.039419 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rfh89\" (UniqueName: \"kubernetes.io/projected/b2677784-5297-4d5b-8558-e904b9668fa5-kube-api-access-rfh89\") pod \"b2677784-5297-4d5b-8558-e904b9668fa5\" (UID: \"b2677784-5297-4d5b-8558-e904b9668fa5\") " Oct 03 13:44:25 crc kubenswrapper[4861]: I1003 13:44:25.039522 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/b2677784-5297-4d5b-8558-e904b9668fa5-util\") pod \"b2677784-5297-4d5b-8558-e904b9668fa5\" (UID: \"b2677784-5297-4d5b-8558-e904b9668fa5\") " Oct 03 13:44:25 crc kubenswrapper[4861]: I1003 13:44:25.039551 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b2677784-5297-4d5b-8558-e904b9668fa5-bundle\") pod \"b2677784-5297-4d5b-8558-e904b9668fa5\" (UID: \"b2677784-5297-4d5b-8558-e904b9668fa5\") " Oct 03 13:44:25 crc kubenswrapper[4861]: I1003 13:44:25.040669 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b2677784-5297-4d5b-8558-e904b9668fa5-bundle" (OuterVolumeSpecName: "bundle") pod "b2677784-5297-4d5b-8558-e904b9668fa5" (UID: "b2677784-5297-4d5b-8558-e904b9668fa5"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:44:25 crc kubenswrapper[4861]: I1003 13:44:25.045680 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b2677784-5297-4d5b-8558-e904b9668fa5-kube-api-access-rfh89" (OuterVolumeSpecName: "kube-api-access-rfh89") pod "b2677784-5297-4d5b-8558-e904b9668fa5" (UID: "b2677784-5297-4d5b-8558-e904b9668fa5"). InnerVolumeSpecName "kube-api-access-rfh89". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:44:25 crc kubenswrapper[4861]: I1003 13:44:25.140691 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rfh89\" (UniqueName: \"kubernetes.io/projected/b2677784-5297-4d5b-8558-e904b9668fa5-kube-api-access-rfh89\") on node \"crc\" DevicePath \"\"" Oct 03 13:44:25 crc kubenswrapper[4861]: I1003 13:44:25.140726 4861 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b2677784-5297-4d5b-8558-e904b9668fa5-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 13:44:25 crc kubenswrapper[4861]: I1003 13:44:25.596126 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b2677784-5297-4d5b-8558-e904b9668fa5-util" (OuterVolumeSpecName: "util") pod "b2677784-5297-4d5b-8558-e904b9668fa5" (UID: "b2677784-5297-4d5b-8558-e904b9668fa5"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:44:25 crc kubenswrapper[4861]: I1003 13:44:25.647285 4861 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/b2677784-5297-4d5b-8558-e904b9668fa5-util\") on node \"crc\" DevicePath \"\"" Oct 03 13:44:25 crc kubenswrapper[4861]: I1003 13:44:25.704000 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2xk9jt" event={"ID":"b2677784-5297-4d5b-8558-e904b9668fa5","Type":"ContainerDied","Data":"e0a89155379e571911a39791a856ff0cfb2e5c153baf4e99ce2adaa2e64eb61c"} Oct 03 13:44:25 crc kubenswrapper[4861]: I1003 13:44:25.704053 4861 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e0a89155379e571911a39791a856ff0cfb2e5c153baf4e99ce2adaa2e64eb61c" Oct 03 13:44:25 crc kubenswrapper[4861]: I1003 13:44:25.704135 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2xk9jt" Oct 03 13:44:33 crc kubenswrapper[4861]: I1003 13:44:33.950527 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-5dbf4fd78b-2d6hn"] Oct 03 13:44:33 crc kubenswrapper[4861]: E1003 13:44:33.951461 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b2677784-5297-4d5b-8558-e904b9668fa5" containerName="extract" Oct 03 13:44:33 crc kubenswrapper[4861]: I1003 13:44:33.951476 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="b2677784-5297-4d5b-8558-e904b9668fa5" containerName="extract" Oct 03 13:44:33 crc kubenswrapper[4861]: E1003 13:44:33.951490 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b2677784-5297-4d5b-8558-e904b9668fa5" containerName="util" Oct 03 13:44:33 crc kubenswrapper[4861]: I1003 13:44:33.951496 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="b2677784-5297-4d5b-8558-e904b9668fa5" containerName="util" Oct 03 13:44:33 crc kubenswrapper[4861]: E1003 13:44:33.951509 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b2677784-5297-4d5b-8558-e904b9668fa5" containerName="pull" Oct 03 13:44:33 crc kubenswrapper[4861]: I1003 13:44:33.951515 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="b2677784-5297-4d5b-8558-e904b9668fa5" containerName="pull" Oct 03 13:44:33 crc kubenswrapper[4861]: E1003 13:44:33.951524 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="35c87388-97eb-44ff-91d6-6e9b9cfaa6a1" containerName="console" Oct 03 13:44:33 crc kubenswrapper[4861]: I1003 13:44:33.951529 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="35c87388-97eb-44ff-91d6-6e9b9cfaa6a1" containerName="console" Oct 03 13:44:33 crc kubenswrapper[4861]: I1003 13:44:33.951612 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="35c87388-97eb-44ff-91d6-6e9b9cfaa6a1" containerName="console" Oct 03 13:44:33 crc kubenswrapper[4861]: I1003 13:44:33.951623 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="b2677784-5297-4d5b-8558-e904b9668fa5" containerName="extract" Oct 03 13:44:33 crc kubenswrapper[4861]: I1003 13:44:33.952020 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-5dbf4fd78b-2d6hn" Oct 03 13:44:33 crc kubenswrapper[4861]: I1003 13:44:33.955409 4861 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert" Oct 03 13:44:33 crc kubenswrapper[4861]: I1003 13:44:33.955537 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt" Oct 03 13:44:33 crc kubenswrapper[4861]: I1003 13:44:33.960015 4861 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-7xqn2" Oct 03 13:44:33 crc kubenswrapper[4861]: I1003 13:44:33.960063 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt" Oct 03 13:44:33 crc kubenswrapper[4861]: I1003 13:44:33.962763 4861 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert" Oct 03 13:44:33 crc kubenswrapper[4861]: I1003 13:44:33.974391 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-5dbf4fd78b-2d6hn"] Oct 03 13:44:34 crc kubenswrapper[4861]: I1003 13:44:34.086693 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c9gzb\" (UniqueName: \"kubernetes.io/projected/03b31621-6432-424e-a55f-aecaf846b082-kube-api-access-c9gzb\") pod \"metallb-operator-controller-manager-5dbf4fd78b-2d6hn\" (UID: \"03b31621-6432-424e-a55f-aecaf846b082\") " pod="metallb-system/metallb-operator-controller-manager-5dbf4fd78b-2d6hn" Oct 03 13:44:34 crc kubenswrapper[4861]: I1003 13:44:34.086736 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/03b31621-6432-424e-a55f-aecaf846b082-apiservice-cert\") pod \"metallb-operator-controller-manager-5dbf4fd78b-2d6hn\" (UID: \"03b31621-6432-424e-a55f-aecaf846b082\") " pod="metallb-system/metallb-operator-controller-manager-5dbf4fd78b-2d6hn" Oct 03 13:44:34 crc kubenswrapper[4861]: I1003 13:44:34.086782 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/03b31621-6432-424e-a55f-aecaf846b082-webhook-cert\") pod \"metallb-operator-controller-manager-5dbf4fd78b-2d6hn\" (UID: \"03b31621-6432-424e-a55f-aecaf846b082\") " pod="metallb-system/metallb-operator-controller-manager-5dbf4fd78b-2d6hn" Oct 03 13:44:34 crc kubenswrapper[4861]: I1003 13:44:34.188554 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/03b31621-6432-424e-a55f-aecaf846b082-webhook-cert\") pod \"metallb-operator-controller-manager-5dbf4fd78b-2d6hn\" (UID: \"03b31621-6432-424e-a55f-aecaf846b082\") " pod="metallb-system/metallb-operator-controller-manager-5dbf4fd78b-2d6hn" Oct 03 13:44:34 crc kubenswrapper[4861]: I1003 13:44:34.188645 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c9gzb\" (UniqueName: \"kubernetes.io/projected/03b31621-6432-424e-a55f-aecaf846b082-kube-api-access-c9gzb\") pod \"metallb-operator-controller-manager-5dbf4fd78b-2d6hn\" (UID: \"03b31621-6432-424e-a55f-aecaf846b082\") " pod="metallb-system/metallb-operator-controller-manager-5dbf4fd78b-2d6hn" Oct 03 13:44:34 crc kubenswrapper[4861]: I1003 13:44:34.188668 4861 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/03b31621-6432-424e-a55f-aecaf846b082-apiservice-cert\") pod \"metallb-operator-controller-manager-5dbf4fd78b-2d6hn\" (UID: \"03b31621-6432-424e-a55f-aecaf846b082\") " pod="metallb-system/metallb-operator-controller-manager-5dbf4fd78b-2d6hn" Oct 03 13:44:34 crc kubenswrapper[4861]: I1003 13:44:34.195660 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/03b31621-6432-424e-a55f-aecaf846b082-apiservice-cert\") pod \"metallb-operator-controller-manager-5dbf4fd78b-2d6hn\" (UID: \"03b31621-6432-424e-a55f-aecaf846b082\") " pod="metallb-system/metallb-operator-controller-manager-5dbf4fd78b-2d6hn" Oct 03 13:44:34 crc kubenswrapper[4861]: I1003 13:44:34.210392 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/03b31621-6432-424e-a55f-aecaf846b082-webhook-cert\") pod \"metallb-operator-controller-manager-5dbf4fd78b-2d6hn\" (UID: \"03b31621-6432-424e-a55f-aecaf846b082\") " pod="metallb-system/metallb-operator-controller-manager-5dbf4fd78b-2d6hn" Oct 03 13:44:34 crc kubenswrapper[4861]: I1003 13:44:34.220728 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-5b7c788d54-q5fg6"] Oct 03 13:44:34 crc kubenswrapper[4861]: I1003 13:44:34.221444 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-5b7c788d54-q5fg6" Oct 03 13:44:34 crc kubenswrapper[4861]: I1003 13:44:34.224565 4861 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-b9pnt" Oct 03 13:44:34 crc kubenswrapper[4861]: I1003 13:44:34.226207 4861 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Oct 03 13:44:34 crc kubenswrapper[4861]: I1003 13:44:34.227073 4861 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert" Oct 03 13:44:34 crc kubenswrapper[4861]: I1003 13:44:34.228219 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c9gzb\" (UniqueName: \"kubernetes.io/projected/03b31621-6432-424e-a55f-aecaf846b082-kube-api-access-c9gzb\") pod \"metallb-operator-controller-manager-5dbf4fd78b-2d6hn\" (UID: \"03b31621-6432-424e-a55f-aecaf846b082\") " pod="metallb-system/metallb-operator-controller-manager-5dbf4fd78b-2d6hn" Oct 03 13:44:34 crc kubenswrapper[4861]: I1003 13:44:34.246121 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-5b7c788d54-q5fg6"] Oct 03 13:44:34 crc kubenswrapper[4861]: I1003 13:44:34.274151 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-5dbf4fd78b-2d6hn" Oct 03 13:44:34 crc kubenswrapper[4861]: I1003 13:44:34.289956 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/f8fb711a-85c6-4014-8f23-7edabc8faf74-webhook-cert\") pod \"metallb-operator-webhook-server-5b7c788d54-q5fg6\" (UID: \"f8fb711a-85c6-4014-8f23-7edabc8faf74\") " pod="metallb-system/metallb-operator-webhook-server-5b7c788d54-q5fg6" Oct 03 13:44:34 crc kubenswrapper[4861]: I1003 13:44:34.290040 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/f8fb711a-85c6-4014-8f23-7edabc8faf74-apiservice-cert\") pod \"metallb-operator-webhook-server-5b7c788d54-q5fg6\" (UID: \"f8fb711a-85c6-4014-8f23-7edabc8faf74\") " pod="metallb-system/metallb-operator-webhook-server-5b7c788d54-q5fg6" Oct 03 13:44:34 crc kubenswrapper[4861]: I1003 13:44:34.290086 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-69fwd\" (UniqueName: \"kubernetes.io/projected/f8fb711a-85c6-4014-8f23-7edabc8faf74-kube-api-access-69fwd\") pod \"metallb-operator-webhook-server-5b7c788d54-q5fg6\" (UID: \"f8fb711a-85c6-4014-8f23-7edabc8faf74\") " pod="metallb-system/metallb-operator-webhook-server-5b7c788d54-q5fg6" Oct 03 13:44:34 crc kubenswrapper[4861]: I1003 13:44:34.391326 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/f8fb711a-85c6-4014-8f23-7edabc8faf74-apiservice-cert\") pod \"metallb-operator-webhook-server-5b7c788d54-q5fg6\" (UID: \"f8fb711a-85c6-4014-8f23-7edabc8faf74\") " pod="metallb-system/metallb-operator-webhook-server-5b7c788d54-q5fg6" Oct 03 13:44:34 crc kubenswrapper[4861]: I1003 13:44:34.391408 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-69fwd\" (UniqueName: \"kubernetes.io/projected/f8fb711a-85c6-4014-8f23-7edabc8faf74-kube-api-access-69fwd\") pod \"metallb-operator-webhook-server-5b7c788d54-q5fg6\" (UID: \"f8fb711a-85c6-4014-8f23-7edabc8faf74\") " pod="metallb-system/metallb-operator-webhook-server-5b7c788d54-q5fg6" Oct 03 13:44:34 crc kubenswrapper[4861]: I1003 13:44:34.391468 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/f8fb711a-85c6-4014-8f23-7edabc8faf74-webhook-cert\") pod \"metallb-operator-webhook-server-5b7c788d54-q5fg6\" (UID: \"f8fb711a-85c6-4014-8f23-7edabc8faf74\") " pod="metallb-system/metallb-operator-webhook-server-5b7c788d54-q5fg6" Oct 03 13:44:34 crc kubenswrapper[4861]: I1003 13:44:34.397173 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/f8fb711a-85c6-4014-8f23-7edabc8faf74-apiservice-cert\") pod \"metallb-operator-webhook-server-5b7c788d54-q5fg6\" (UID: \"f8fb711a-85c6-4014-8f23-7edabc8faf74\") " pod="metallb-system/metallb-operator-webhook-server-5b7c788d54-q5fg6" Oct 03 13:44:34 crc kubenswrapper[4861]: I1003 13:44:34.398936 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/f8fb711a-85c6-4014-8f23-7edabc8faf74-webhook-cert\") pod \"metallb-operator-webhook-server-5b7c788d54-q5fg6\" (UID: \"f8fb711a-85c6-4014-8f23-7edabc8faf74\") " 
pod="metallb-system/metallb-operator-webhook-server-5b7c788d54-q5fg6" Oct 03 13:44:34 crc kubenswrapper[4861]: I1003 13:44:34.433999 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-69fwd\" (UniqueName: \"kubernetes.io/projected/f8fb711a-85c6-4014-8f23-7edabc8faf74-kube-api-access-69fwd\") pod \"metallb-operator-webhook-server-5b7c788d54-q5fg6\" (UID: \"f8fb711a-85c6-4014-8f23-7edabc8faf74\") " pod="metallb-system/metallb-operator-webhook-server-5b7c788d54-q5fg6" Oct 03 13:44:34 crc kubenswrapper[4861]: I1003 13:44:34.580700 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-5b7c788d54-q5fg6" Oct 03 13:44:34 crc kubenswrapper[4861]: I1003 13:44:34.737977 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-5dbf4fd78b-2d6hn"] Oct 03 13:44:34 crc kubenswrapper[4861]: W1003 13:44:34.757550 4861 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod03b31621_6432_424e_a55f_aecaf846b082.slice/crio-a70c64ececcbe1019166554da51c8147930cef318204d381602fca3a898033ff WatchSource:0}: Error finding container a70c64ececcbe1019166554da51c8147930cef318204d381602fca3a898033ff: Status 404 returned error can't find the container with id a70c64ececcbe1019166554da51c8147930cef318204d381602fca3a898033ff Oct 03 13:44:34 crc kubenswrapper[4861]: I1003 13:44:34.838964 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-5dbf4fd78b-2d6hn" event={"ID":"03b31621-6432-424e-a55f-aecaf846b082","Type":"ContainerStarted","Data":"a70c64ececcbe1019166554da51c8147930cef318204d381602fca3a898033ff"} Oct 03 13:44:35 crc kubenswrapper[4861]: I1003 13:44:35.183293 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-5b7c788d54-q5fg6"] Oct 03 13:44:35 crc kubenswrapper[4861]: W1003 13:44:35.193206 4861 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf8fb711a_85c6_4014_8f23_7edabc8faf74.slice/crio-8f5cd77a005503bcafeb306805713217db97de549df3f7454c9b5acc4f2af40b WatchSource:0}: Error finding container 8f5cd77a005503bcafeb306805713217db97de549df3f7454c9b5acc4f2af40b: Status 404 returned error can't find the container with id 8f5cd77a005503bcafeb306805713217db97de549df3f7454c9b5acc4f2af40b Oct 03 13:44:35 crc kubenswrapper[4861]: I1003 13:44:35.856790 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-5b7c788d54-q5fg6" event={"ID":"f8fb711a-85c6-4014-8f23-7edabc8faf74","Type":"ContainerStarted","Data":"8f5cd77a005503bcafeb306805713217db97de549df3f7454c9b5acc4f2af40b"} Oct 03 13:44:43 crc kubenswrapper[4861]: I1003 13:44:43.906300 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-5b7c788d54-q5fg6" event={"ID":"f8fb711a-85c6-4014-8f23-7edabc8faf74","Type":"ContainerStarted","Data":"26112fd22b2be894c23a269d4f4f0e31fb33e141d5b80e62f5a07e907b28d786"} Oct 03 13:44:43 crc kubenswrapper[4861]: I1003 13:44:43.906910 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-5b7c788d54-q5fg6" Oct 03 13:44:43 crc kubenswrapper[4861]: I1003 13:44:43.907973 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="metallb-system/metallb-operator-controller-manager-5dbf4fd78b-2d6hn" event={"ID":"03b31621-6432-424e-a55f-aecaf846b082","Type":"ContainerStarted","Data":"56a754cb1c83b3fb9424dc92a0cb1d0bfb8e1f7dd427b0c4d9984280bf13b844"} Oct 03 13:44:43 crc kubenswrapper[4861]: I1003 13:44:43.908638 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-5dbf4fd78b-2d6hn" Oct 03 13:44:43 crc kubenswrapper[4861]: I1003 13:44:43.930988 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-5b7c788d54-q5fg6" podStartSLOduration=1.9331730839999999 podStartE2EDuration="9.930974036s" podCreationTimestamp="2025-10-03 13:44:34 +0000 UTC" firstStartedPulling="2025-10-03 13:44:35.196733195 +0000 UTC m=+789.194718242" lastFinishedPulling="2025-10-03 13:44:43.194534107 +0000 UTC m=+797.192519194" observedRunningTime="2025-10-03 13:44:43.923465505 +0000 UTC m=+797.921450622" watchObservedRunningTime="2025-10-03 13:44:43.930974036 +0000 UTC m=+797.928959083" Oct 03 13:44:43 crc kubenswrapper[4861]: I1003 13:44:43.950093 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-5dbf4fd78b-2d6hn" podStartSLOduration=2.536705275 podStartE2EDuration="10.950056465s" podCreationTimestamp="2025-10-03 13:44:33 +0000 UTC" firstStartedPulling="2025-10-03 13:44:34.761752169 +0000 UTC m=+788.759737226" lastFinishedPulling="2025-10-03 13:44:43.175103349 +0000 UTC m=+797.173088416" observedRunningTime="2025-10-03 13:44:43.944862477 +0000 UTC m=+797.942847524" watchObservedRunningTime="2025-10-03 13:44:43.950056465 +0000 UTC m=+797.948041512" Oct 03 13:44:54 crc kubenswrapper[4861]: I1003 13:44:54.586955 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-5b7c788d54-q5fg6" Oct 03 13:45:00 crc kubenswrapper[4861]: I1003 13:45:00.136609 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29324985-v2nnz"] Oct 03 13:45:00 crc kubenswrapper[4861]: I1003 13:45:00.137799 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29324985-v2nnz" Oct 03 13:45:00 crc kubenswrapper[4861]: I1003 13:45:00.139453 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 03 13:45:00 crc kubenswrapper[4861]: I1003 13:45:00.140864 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 03 13:45:00 crc kubenswrapper[4861]: I1003 13:45:00.149888 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29324985-v2nnz"] Oct 03 13:45:00 crc kubenswrapper[4861]: I1003 13:45:00.230434 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5a77c936-63c4-4e96-aa36-7616663f398e-secret-volume\") pod \"collect-profiles-29324985-v2nnz\" (UID: \"5a77c936-63c4-4e96-aa36-7616663f398e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324985-v2nnz" Oct 03 13:45:00 crc kubenswrapper[4861]: I1003 13:45:00.230501 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5a77c936-63c4-4e96-aa36-7616663f398e-config-volume\") pod \"collect-profiles-29324985-v2nnz\" (UID: \"5a77c936-63c4-4e96-aa36-7616663f398e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324985-v2nnz" Oct 03 13:45:00 crc kubenswrapper[4861]: I1003 13:45:00.230570 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-279gb\" (UniqueName: \"kubernetes.io/projected/5a77c936-63c4-4e96-aa36-7616663f398e-kube-api-access-279gb\") pod \"collect-profiles-29324985-v2nnz\" (UID: \"5a77c936-63c4-4e96-aa36-7616663f398e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324985-v2nnz" Oct 03 13:45:00 crc kubenswrapper[4861]: I1003 13:45:00.331623 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-279gb\" (UniqueName: \"kubernetes.io/projected/5a77c936-63c4-4e96-aa36-7616663f398e-kube-api-access-279gb\") pod \"collect-profiles-29324985-v2nnz\" (UID: \"5a77c936-63c4-4e96-aa36-7616663f398e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324985-v2nnz" Oct 03 13:45:00 crc kubenswrapper[4861]: I1003 13:45:00.332008 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5a77c936-63c4-4e96-aa36-7616663f398e-secret-volume\") pod \"collect-profiles-29324985-v2nnz\" (UID: \"5a77c936-63c4-4e96-aa36-7616663f398e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324985-v2nnz" Oct 03 13:45:00 crc kubenswrapper[4861]: I1003 13:45:00.332201 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5a77c936-63c4-4e96-aa36-7616663f398e-config-volume\") pod \"collect-profiles-29324985-v2nnz\" (UID: \"5a77c936-63c4-4e96-aa36-7616663f398e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324985-v2nnz" Oct 03 13:45:00 crc kubenswrapper[4861]: I1003 13:45:00.333175 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5a77c936-63c4-4e96-aa36-7616663f398e-config-volume\") pod 
\"collect-profiles-29324985-v2nnz\" (UID: \"5a77c936-63c4-4e96-aa36-7616663f398e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324985-v2nnz" Oct 03 13:45:00 crc kubenswrapper[4861]: I1003 13:45:00.341200 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5a77c936-63c4-4e96-aa36-7616663f398e-secret-volume\") pod \"collect-profiles-29324985-v2nnz\" (UID: \"5a77c936-63c4-4e96-aa36-7616663f398e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324985-v2nnz" Oct 03 13:45:00 crc kubenswrapper[4861]: I1003 13:45:00.353093 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-279gb\" (UniqueName: \"kubernetes.io/projected/5a77c936-63c4-4e96-aa36-7616663f398e-kube-api-access-279gb\") pod \"collect-profiles-29324985-v2nnz\" (UID: \"5a77c936-63c4-4e96-aa36-7616663f398e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324985-v2nnz" Oct 03 13:45:00 crc kubenswrapper[4861]: I1003 13:45:00.458254 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29324985-v2nnz" Oct 03 13:45:00 crc kubenswrapper[4861]: I1003 13:45:00.893198 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29324985-v2nnz"] Oct 03 13:45:00 crc kubenswrapper[4861]: W1003 13:45:00.901875 4861 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5a77c936_63c4_4e96_aa36_7616663f398e.slice/crio-84108cb049768026c61c0cd82f7d0d8de04fde90bd7f993ac1eac4fd342ff13d WatchSource:0}: Error finding container 84108cb049768026c61c0cd82f7d0d8de04fde90bd7f993ac1eac4fd342ff13d: Status 404 returned error can't find the container with id 84108cb049768026c61c0cd82f7d0d8de04fde90bd7f993ac1eac4fd342ff13d Oct 03 13:45:01 crc kubenswrapper[4861]: I1003 13:45:01.008223 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29324985-v2nnz" event={"ID":"5a77c936-63c4-4e96-aa36-7616663f398e","Type":"ContainerStarted","Data":"84108cb049768026c61c0cd82f7d0d8de04fde90bd7f993ac1eac4fd342ff13d"} Oct 03 13:45:02 crc kubenswrapper[4861]: I1003 13:45:02.015457 4861 generic.go:334] "Generic (PLEG): container finished" podID="5a77c936-63c4-4e96-aa36-7616663f398e" containerID="180e2ac430ddc953196a9766ff02af22fe2178bd1fe1a156cdcdf8fe29c5b54a" exitCode=0 Oct 03 13:45:02 crc kubenswrapper[4861]: I1003 13:45:02.015566 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29324985-v2nnz" event={"ID":"5a77c936-63c4-4e96-aa36-7616663f398e","Type":"ContainerDied","Data":"180e2ac430ddc953196a9766ff02af22fe2178bd1fe1a156cdcdf8fe29c5b54a"} Oct 03 13:45:03 crc kubenswrapper[4861]: I1003 13:45:03.295542 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29324985-v2nnz" Oct 03 13:45:03 crc kubenswrapper[4861]: I1003 13:45:03.471452 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5a77c936-63c4-4e96-aa36-7616663f398e-config-volume\") pod \"5a77c936-63c4-4e96-aa36-7616663f398e\" (UID: \"5a77c936-63c4-4e96-aa36-7616663f398e\") " Oct 03 13:45:03 crc kubenswrapper[4861]: I1003 13:45:03.471528 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279gb\" (UniqueName: \"kubernetes.io/projected/5a77c936-63c4-4e96-aa36-7616663f398e-kube-api-access-279gb\") pod \"5a77c936-63c4-4e96-aa36-7616663f398e\" (UID: \"5a77c936-63c4-4e96-aa36-7616663f398e\") " Oct 03 13:45:03 crc kubenswrapper[4861]: I1003 13:45:03.472323 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5a77c936-63c4-4e96-aa36-7616663f398e-config-volume" (OuterVolumeSpecName: "config-volume") pod "5a77c936-63c4-4e96-aa36-7616663f398e" (UID: "5a77c936-63c4-4e96-aa36-7616663f398e"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:45:03 crc kubenswrapper[4861]: I1003 13:45:03.472734 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5a77c936-63c4-4e96-aa36-7616663f398e-secret-volume\") pod \"5a77c936-63c4-4e96-aa36-7616663f398e\" (UID: \"5a77c936-63c4-4e96-aa36-7616663f398e\") " Oct 03 13:45:03 crc kubenswrapper[4861]: I1003 13:45:03.473043 4861 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5a77c936-63c4-4e96-aa36-7616663f398e-config-volume\") on node \"crc\" DevicePath \"\"" Oct 03 13:45:03 crc kubenswrapper[4861]: I1003 13:45:03.479450 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5a77c936-63c4-4e96-aa36-7616663f398e-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "5a77c936-63c4-4e96-aa36-7616663f398e" (UID: "5a77c936-63c4-4e96-aa36-7616663f398e"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:45:03 crc kubenswrapper[4861]: I1003 13:45:03.487526 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5a77c936-63c4-4e96-aa36-7616663f398e-kube-api-access-279gb" (OuterVolumeSpecName: "kube-api-access-279gb") pod "5a77c936-63c4-4e96-aa36-7616663f398e" (UID: "5a77c936-63c4-4e96-aa36-7616663f398e"). InnerVolumeSpecName "kube-api-access-279gb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:45:03 crc kubenswrapper[4861]: I1003 13:45:03.574428 4861 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5a77c936-63c4-4e96-aa36-7616663f398e-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 03 13:45:03 crc kubenswrapper[4861]: I1003 13:45:03.574477 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279gb\" (UniqueName: \"kubernetes.io/projected/5a77c936-63c4-4e96-aa36-7616663f398e-kube-api-access-279gb\") on node \"crc\" DevicePath \"\"" Oct 03 13:45:04 crc kubenswrapper[4861]: I1003 13:45:04.027832 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29324985-v2nnz" event={"ID":"5a77c936-63c4-4e96-aa36-7616663f398e","Type":"ContainerDied","Data":"84108cb049768026c61c0cd82f7d0d8de04fde90bd7f993ac1eac4fd342ff13d"} Oct 03 13:45:04 crc kubenswrapper[4861]: I1003 13:45:04.027876 4861 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="84108cb049768026c61c0cd82f7d0d8de04fde90bd7f993ac1eac4fd342ff13d" Oct 03 13:45:04 crc kubenswrapper[4861]: I1003 13:45:04.027949 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29324985-v2nnz" Oct 03 13:45:09 crc kubenswrapper[4861]: I1003 13:45:09.249887 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-7lvf7"] Oct 03 13:45:09 crc kubenswrapper[4861]: E1003 13:45:09.250610 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5a77c936-63c4-4e96-aa36-7616663f398e" containerName="collect-profiles" Oct 03 13:45:09 crc kubenswrapper[4861]: I1003 13:45:09.250624 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="5a77c936-63c4-4e96-aa36-7616663f398e" containerName="collect-profiles" Oct 03 13:45:09 crc kubenswrapper[4861]: I1003 13:45:09.250726 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="5a77c936-63c4-4e96-aa36-7616663f398e" containerName="collect-profiles" Oct 03 13:45:09 crc kubenswrapper[4861]: I1003 13:45:09.251452 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-7lvf7" Oct 03 13:45:09 crc kubenswrapper[4861]: I1003 13:45:09.266725 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-7lvf7"] Oct 03 13:45:09 crc kubenswrapper[4861]: I1003 13:45:09.344824 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w846c\" (UniqueName: \"kubernetes.io/projected/0c547d33-c876-4c98-b1ce-a61325cedd45-kube-api-access-w846c\") pod \"community-operators-7lvf7\" (UID: \"0c547d33-c876-4c98-b1ce-a61325cedd45\") " pod="openshift-marketplace/community-operators-7lvf7" Oct 03 13:45:09 crc kubenswrapper[4861]: I1003 13:45:09.344891 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0c547d33-c876-4c98-b1ce-a61325cedd45-catalog-content\") pod \"community-operators-7lvf7\" (UID: \"0c547d33-c876-4c98-b1ce-a61325cedd45\") " pod="openshift-marketplace/community-operators-7lvf7" Oct 03 13:45:09 crc kubenswrapper[4861]: I1003 13:45:09.344936 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0c547d33-c876-4c98-b1ce-a61325cedd45-utilities\") pod \"community-operators-7lvf7\" (UID: \"0c547d33-c876-4c98-b1ce-a61325cedd45\") " pod="openshift-marketplace/community-operators-7lvf7" Oct 03 13:45:09 crc kubenswrapper[4861]: I1003 13:45:09.445709 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0c547d33-c876-4c98-b1ce-a61325cedd45-catalog-content\") pod \"community-operators-7lvf7\" (UID: \"0c547d33-c876-4c98-b1ce-a61325cedd45\") " pod="openshift-marketplace/community-operators-7lvf7" Oct 03 13:45:09 crc kubenswrapper[4861]: I1003 13:45:09.446025 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0c547d33-c876-4c98-b1ce-a61325cedd45-utilities\") pod \"community-operators-7lvf7\" (UID: \"0c547d33-c876-4c98-b1ce-a61325cedd45\") " pod="openshift-marketplace/community-operators-7lvf7" Oct 03 13:45:09 crc kubenswrapper[4861]: I1003 13:45:09.446159 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w846c\" (UniqueName: \"kubernetes.io/projected/0c547d33-c876-4c98-b1ce-a61325cedd45-kube-api-access-w846c\") pod \"community-operators-7lvf7\" (UID: \"0c547d33-c876-4c98-b1ce-a61325cedd45\") " pod="openshift-marketplace/community-operators-7lvf7" Oct 03 13:45:09 crc kubenswrapper[4861]: I1003 13:45:09.446639 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0c547d33-c876-4c98-b1ce-a61325cedd45-catalog-content\") pod \"community-operators-7lvf7\" (UID: \"0c547d33-c876-4c98-b1ce-a61325cedd45\") " pod="openshift-marketplace/community-operators-7lvf7" Oct 03 13:45:09 crc kubenswrapper[4861]: I1003 13:45:09.446713 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0c547d33-c876-4c98-b1ce-a61325cedd45-utilities\") pod \"community-operators-7lvf7\" (UID: \"0c547d33-c876-4c98-b1ce-a61325cedd45\") " pod="openshift-marketplace/community-operators-7lvf7" Oct 03 13:45:09 crc kubenswrapper[4861]: I1003 13:45:09.466063 4861 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-w846c\" (UniqueName: \"kubernetes.io/projected/0c547d33-c876-4c98-b1ce-a61325cedd45-kube-api-access-w846c\") pod \"community-operators-7lvf7\" (UID: \"0c547d33-c876-4c98-b1ce-a61325cedd45\") " pod="openshift-marketplace/community-operators-7lvf7" Oct 03 13:45:09 crc kubenswrapper[4861]: I1003 13:45:09.568892 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-7lvf7" Oct 03 13:45:10 crc kubenswrapper[4861]: I1003 13:45:10.432383 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-7lvf7"] Oct 03 13:45:11 crc kubenswrapper[4861]: I1003 13:45:11.069338 4861 generic.go:334] "Generic (PLEG): container finished" podID="0c547d33-c876-4c98-b1ce-a61325cedd45" containerID="30a549db6728b13f808ac91415818c9805ecb9c4f2aa39cb1f6fe254cf69b313" exitCode=0 Oct 03 13:45:11 crc kubenswrapper[4861]: I1003 13:45:11.069396 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7lvf7" event={"ID":"0c547d33-c876-4c98-b1ce-a61325cedd45","Type":"ContainerDied","Data":"30a549db6728b13f808ac91415818c9805ecb9c4f2aa39cb1f6fe254cf69b313"} Oct 03 13:45:11 crc kubenswrapper[4861]: I1003 13:45:11.069434 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7lvf7" event={"ID":"0c547d33-c876-4c98-b1ce-a61325cedd45","Type":"ContainerStarted","Data":"f1e83a9d4ad8508d486e48f2cfd1df63683f655ce79185b8991a89c9ee31dfc9"} Oct 03 13:45:12 crc kubenswrapper[4861]: I1003 13:45:12.078298 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7lvf7" event={"ID":"0c547d33-c876-4c98-b1ce-a61325cedd45","Type":"ContainerStarted","Data":"5c4283ee99b102c851265075b1f36a68f1bb7afa9b621d8219a60c1c65e1685a"} Oct 03 13:45:12 crc kubenswrapper[4861]: I1003 13:45:12.233949 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-pjp8c"] Oct 03 13:45:12 crc kubenswrapper[4861]: I1003 13:45:12.235285 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pjp8c" Oct 03 13:45:12 crc kubenswrapper[4861]: I1003 13:45:12.251471 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-pjp8c"] Oct 03 13:45:12 crc kubenswrapper[4861]: I1003 13:45:12.383960 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d830b12-c365-42ab-a9dc-583b4b3fabe9-utilities\") pod \"redhat-marketplace-pjp8c\" (UID: \"1d830b12-c365-42ab-a9dc-583b4b3fabe9\") " pod="openshift-marketplace/redhat-marketplace-pjp8c" Oct 03 13:45:12 crc kubenswrapper[4861]: I1003 13:45:12.384017 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d830b12-c365-42ab-a9dc-583b4b3fabe9-catalog-content\") pod \"redhat-marketplace-pjp8c\" (UID: \"1d830b12-c365-42ab-a9dc-583b4b3fabe9\") " pod="openshift-marketplace/redhat-marketplace-pjp8c" Oct 03 13:45:12 crc kubenswrapper[4861]: I1003 13:45:12.384088 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s4nx8\" (UniqueName: \"kubernetes.io/projected/1d830b12-c365-42ab-a9dc-583b4b3fabe9-kube-api-access-s4nx8\") pod \"redhat-marketplace-pjp8c\" (UID: \"1d830b12-c365-42ab-a9dc-583b4b3fabe9\") " pod="openshift-marketplace/redhat-marketplace-pjp8c" Oct 03 13:45:12 crc kubenswrapper[4861]: I1003 13:45:12.485251 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d830b12-c365-42ab-a9dc-583b4b3fabe9-utilities\") pod \"redhat-marketplace-pjp8c\" (UID: \"1d830b12-c365-42ab-a9dc-583b4b3fabe9\") " pod="openshift-marketplace/redhat-marketplace-pjp8c" Oct 03 13:45:12 crc kubenswrapper[4861]: I1003 13:45:12.485307 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d830b12-c365-42ab-a9dc-583b4b3fabe9-catalog-content\") pod \"redhat-marketplace-pjp8c\" (UID: \"1d830b12-c365-42ab-a9dc-583b4b3fabe9\") " pod="openshift-marketplace/redhat-marketplace-pjp8c" Oct 03 13:45:12 crc kubenswrapper[4861]: I1003 13:45:12.485357 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s4nx8\" (UniqueName: \"kubernetes.io/projected/1d830b12-c365-42ab-a9dc-583b4b3fabe9-kube-api-access-s4nx8\") pod \"redhat-marketplace-pjp8c\" (UID: \"1d830b12-c365-42ab-a9dc-583b4b3fabe9\") " pod="openshift-marketplace/redhat-marketplace-pjp8c" Oct 03 13:45:12 crc kubenswrapper[4861]: I1003 13:45:12.485735 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d830b12-c365-42ab-a9dc-583b4b3fabe9-utilities\") pod \"redhat-marketplace-pjp8c\" (UID: \"1d830b12-c365-42ab-a9dc-583b4b3fabe9\") " pod="openshift-marketplace/redhat-marketplace-pjp8c" Oct 03 13:45:12 crc kubenswrapper[4861]: I1003 13:45:12.485821 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d830b12-c365-42ab-a9dc-583b4b3fabe9-catalog-content\") pod \"redhat-marketplace-pjp8c\" (UID: \"1d830b12-c365-42ab-a9dc-583b4b3fabe9\") " pod="openshift-marketplace/redhat-marketplace-pjp8c" Oct 03 13:45:12 crc kubenswrapper[4861]: I1003 13:45:12.516981 4861 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-s4nx8\" (UniqueName: \"kubernetes.io/projected/1d830b12-c365-42ab-a9dc-583b4b3fabe9-kube-api-access-s4nx8\") pod \"redhat-marketplace-pjp8c\" (UID: \"1d830b12-c365-42ab-a9dc-583b4b3fabe9\") " pod="openshift-marketplace/redhat-marketplace-pjp8c" Oct 03 13:45:12 crc kubenswrapper[4861]: I1003 13:45:12.552968 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pjp8c" Oct 03 13:45:12 crc kubenswrapper[4861]: I1003 13:45:12.950849 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-pjp8c"] Oct 03 13:45:13 crc kubenswrapper[4861]: I1003 13:45:13.084354 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pjp8c" event={"ID":"1d830b12-c365-42ab-a9dc-583b4b3fabe9","Type":"ContainerStarted","Data":"de71a00c40e329efd3511f3ed5f0198e27af50c0a29dc55643efde6502003cef"} Oct 03 13:45:13 crc kubenswrapper[4861]: I1003 13:45:13.086555 4861 generic.go:334] "Generic (PLEG): container finished" podID="0c547d33-c876-4c98-b1ce-a61325cedd45" containerID="5c4283ee99b102c851265075b1f36a68f1bb7afa9b621d8219a60c1c65e1685a" exitCode=0 Oct 03 13:45:13 crc kubenswrapper[4861]: I1003 13:45:13.086594 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7lvf7" event={"ID":"0c547d33-c876-4c98-b1ce-a61325cedd45","Type":"ContainerDied","Data":"5c4283ee99b102c851265075b1f36a68f1bb7afa9b621d8219a60c1c65e1685a"} Oct 03 13:45:14 crc kubenswrapper[4861]: I1003 13:45:14.094112 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7lvf7" event={"ID":"0c547d33-c876-4c98-b1ce-a61325cedd45","Type":"ContainerStarted","Data":"5f91ad9b20d752875b9c2a97e9b8911905dbe3c61d0872c9eedb3a4e389bd737"} Oct 03 13:45:14 crc kubenswrapper[4861]: I1003 13:45:14.095821 4861 generic.go:334] "Generic (PLEG): container finished" podID="1d830b12-c365-42ab-a9dc-583b4b3fabe9" containerID="0af3c312686221d4e2d1065cd72d8bb25338a71199949c1499b9a40fc1eba8f0" exitCode=0 Oct 03 13:45:14 crc kubenswrapper[4861]: I1003 13:45:14.095860 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pjp8c" event={"ID":"1d830b12-c365-42ab-a9dc-583b4b3fabe9","Type":"ContainerDied","Data":"0af3c312686221d4e2d1065cd72d8bb25338a71199949c1499b9a40fc1eba8f0"} Oct 03 13:45:14 crc kubenswrapper[4861]: I1003 13:45:14.117578 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-7lvf7" podStartSLOduration=2.656782926 podStartE2EDuration="5.117554352s" podCreationTimestamp="2025-10-03 13:45:09 +0000 UTC" firstStartedPulling="2025-10-03 13:45:11.070889084 +0000 UTC m=+825.068874151" lastFinishedPulling="2025-10-03 13:45:13.53166053 +0000 UTC m=+827.529645577" observedRunningTime="2025-10-03 13:45:14.113960777 +0000 UTC m=+828.111945824" watchObservedRunningTime="2025-10-03 13:45:14.117554352 +0000 UTC m=+828.115539399" Oct 03 13:45:14 crc kubenswrapper[4861]: I1003 13:45:14.276549 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-5dbf4fd78b-2d6hn" Oct 03 13:45:14 crc kubenswrapper[4861]: I1003 13:45:14.936995 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-xxhsl"] Oct 03 13:45:14 crc kubenswrapper[4861]: I1003 13:45:14.939112 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-xxhsl" Oct 03 13:45:14 crc kubenswrapper[4861]: I1003 13:45:14.941463 4861 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-ftfsf" Oct 03 13:45:14 crc kubenswrapper[4861]: I1003 13:45:14.941730 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup" Oct 03 13:45:14 crc kubenswrapper[4861]: I1003 13:45:14.944886 4861 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret" Oct 03 13:45:14 crc kubenswrapper[4861]: I1003 13:45:14.962899 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-64bf5d555-jn69p"] Oct 03 13:45:14 crc kubenswrapper[4861]: I1003 13:45:14.964001 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-jn69p" Oct 03 13:45:14 crc kubenswrapper[4861]: I1003 13:45:14.965616 4861 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert" Oct 03 13:45:14 crc kubenswrapper[4861]: I1003 13:45:14.973198 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-64bf5d555-jn69p"] Oct 03 13:45:15 crc kubenswrapper[4861]: I1003 13:45:15.020314 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/f3324863-98d8-44d0-91dc-4bb8dc243a8f-frr-startup\") pod \"frr-k8s-xxhsl\" (UID: \"f3324863-98d8-44d0-91dc-4bb8dc243a8f\") " pod="metallb-system/frr-k8s-xxhsl" Oct 03 13:45:15 crc kubenswrapper[4861]: I1003 13:45:15.020368 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mvxrd\" (UniqueName: \"kubernetes.io/projected/f3324863-98d8-44d0-91dc-4bb8dc243a8f-kube-api-access-mvxrd\") pod \"frr-k8s-xxhsl\" (UID: \"f3324863-98d8-44d0-91dc-4bb8dc243a8f\") " pod="metallb-system/frr-k8s-xxhsl" Oct 03 13:45:15 crc kubenswrapper[4861]: I1003 13:45:15.020400 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/f3324863-98d8-44d0-91dc-4bb8dc243a8f-frr-sockets\") pod \"frr-k8s-xxhsl\" (UID: \"f3324863-98d8-44d0-91dc-4bb8dc243a8f\") " pod="metallb-system/frr-k8s-xxhsl" Oct 03 13:45:15 crc kubenswrapper[4861]: I1003 13:45:15.020430 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/9480abea-a1f1-4416-880b-8fb72fd8716b-cert\") pod \"frr-k8s-webhook-server-64bf5d555-jn69p\" (UID: \"9480abea-a1f1-4416-880b-8fb72fd8716b\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-jn69p" Oct 03 13:45:15 crc kubenswrapper[4861]: I1003 13:45:15.020468 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/f3324863-98d8-44d0-91dc-4bb8dc243a8f-reloader\") pod \"frr-k8s-xxhsl\" (UID: \"f3324863-98d8-44d0-91dc-4bb8dc243a8f\") " pod="metallb-system/frr-k8s-xxhsl" Oct 03 13:45:15 crc kubenswrapper[4861]: I1003 13:45:15.020492 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/f3324863-98d8-44d0-91dc-4bb8dc243a8f-metrics\") pod \"frr-k8s-xxhsl\" (UID: 
\"f3324863-98d8-44d0-91dc-4bb8dc243a8f\") " pod="metallb-system/frr-k8s-xxhsl" Oct 03 13:45:15 crc kubenswrapper[4861]: I1003 13:45:15.020520 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/f3324863-98d8-44d0-91dc-4bb8dc243a8f-frr-conf\") pod \"frr-k8s-xxhsl\" (UID: \"f3324863-98d8-44d0-91dc-4bb8dc243a8f\") " pod="metallb-system/frr-k8s-xxhsl" Oct 03 13:45:15 crc kubenswrapper[4861]: I1003 13:45:15.020541 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f3324863-98d8-44d0-91dc-4bb8dc243a8f-metrics-certs\") pod \"frr-k8s-xxhsl\" (UID: \"f3324863-98d8-44d0-91dc-4bb8dc243a8f\") " pod="metallb-system/frr-k8s-xxhsl" Oct 03 13:45:15 crc kubenswrapper[4861]: I1003 13:45:15.020566 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-glvgc\" (UniqueName: \"kubernetes.io/projected/9480abea-a1f1-4416-880b-8fb72fd8716b-kube-api-access-glvgc\") pod \"frr-k8s-webhook-server-64bf5d555-jn69p\" (UID: \"9480abea-a1f1-4416-880b-8fb72fd8716b\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-jn69p" Oct 03 13:45:15 crc kubenswrapper[4861]: I1003 13:45:15.102660 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-68d546b9d8-cm757"] Oct 03 13:45:15 crc kubenswrapper[4861]: I1003 13:45:15.103562 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-68d546b9d8-cm757" Oct 03 13:45:15 crc kubenswrapper[4861]: I1003 13:45:15.106591 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-vdx27"] Oct 03 13:45:15 crc kubenswrapper[4861]: I1003 13:45:15.107337 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/speaker-vdx27" Oct 03 13:45:15 crc kubenswrapper[4861]: I1003 13:45:15.121503 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/f3324863-98d8-44d0-91dc-4bb8dc243a8f-frr-startup\") pod \"frr-k8s-xxhsl\" (UID: \"f3324863-98d8-44d0-91dc-4bb8dc243a8f\") " pod="metallb-system/frr-k8s-xxhsl" Oct 03 13:45:15 crc kubenswrapper[4861]: I1003 13:45:15.121553 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mvxrd\" (UniqueName: \"kubernetes.io/projected/f3324863-98d8-44d0-91dc-4bb8dc243a8f-kube-api-access-mvxrd\") pod \"frr-k8s-xxhsl\" (UID: \"f3324863-98d8-44d0-91dc-4bb8dc243a8f\") " pod="metallb-system/frr-k8s-xxhsl" Oct 03 13:45:15 crc kubenswrapper[4861]: I1003 13:45:15.121608 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/f3324863-98d8-44d0-91dc-4bb8dc243a8f-frr-sockets\") pod \"frr-k8s-xxhsl\" (UID: \"f3324863-98d8-44d0-91dc-4bb8dc243a8f\") " pod="metallb-system/frr-k8s-xxhsl" Oct 03 13:45:15 crc kubenswrapper[4861]: I1003 13:45:15.121691 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/9480abea-a1f1-4416-880b-8fb72fd8716b-cert\") pod \"frr-k8s-webhook-server-64bf5d555-jn69p\" (UID: \"9480abea-a1f1-4416-880b-8fb72fd8716b\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-jn69p" Oct 03 13:45:15 crc kubenswrapper[4861]: I1003 13:45:15.121734 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/f3324863-98d8-44d0-91dc-4bb8dc243a8f-reloader\") pod \"frr-k8s-xxhsl\" (UID: \"f3324863-98d8-44d0-91dc-4bb8dc243a8f\") " pod="metallb-system/frr-k8s-xxhsl" Oct 03 13:45:15 crc kubenswrapper[4861]: I1003 13:45:15.121757 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/f3324863-98d8-44d0-91dc-4bb8dc243a8f-metrics\") pod \"frr-k8s-xxhsl\" (UID: \"f3324863-98d8-44d0-91dc-4bb8dc243a8f\") " pod="metallb-system/frr-k8s-xxhsl" Oct 03 13:45:15 crc kubenswrapper[4861]: I1003 13:45:15.121783 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/f3324863-98d8-44d0-91dc-4bb8dc243a8f-frr-conf\") pod \"frr-k8s-xxhsl\" (UID: \"f3324863-98d8-44d0-91dc-4bb8dc243a8f\") " pod="metallb-system/frr-k8s-xxhsl" Oct 03 13:45:15 crc kubenswrapper[4861]: I1003 13:45:15.121804 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f3324863-98d8-44d0-91dc-4bb8dc243a8f-metrics-certs\") pod \"frr-k8s-xxhsl\" (UID: \"f3324863-98d8-44d0-91dc-4bb8dc243a8f\") " pod="metallb-system/frr-k8s-xxhsl" Oct 03 13:45:15 crc kubenswrapper[4861]: I1003 13:45:15.121832 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-glvgc\" (UniqueName: \"kubernetes.io/projected/9480abea-a1f1-4416-880b-8fb72fd8716b-kube-api-access-glvgc\") pod \"frr-k8s-webhook-server-64bf5d555-jn69p\" (UID: \"9480abea-a1f1-4416-880b-8fb72fd8716b\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-jn69p" Oct 03 13:45:15 crc kubenswrapper[4861]: E1003 13:45:15.122075 4861 secret.go:188] Couldn't get secret metallb-system/frr-k8s-webhook-server-cert: secret 
"frr-k8s-webhook-server-cert" not found Oct 03 13:45:15 crc kubenswrapper[4861]: I1003 13:45:15.122110 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/f3324863-98d8-44d0-91dc-4bb8dc243a8f-frr-sockets\") pod \"frr-k8s-xxhsl\" (UID: \"f3324863-98d8-44d0-91dc-4bb8dc243a8f\") " pod="metallb-system/frr-k8s-xxhsl" Oct 03 13:45:15 crc kubenswrapper[4861]: E1003 13:45:15.122152 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9480abea-a1f1-4416-880b-8fb72fd8716b-cert podName:9480abea-a1f1-4416-880b-8fb72fd8716b nodeName:}" failed. No retries permitted until 2025-10-03 13:45:15.622132286 +0000 UTC m=+829.620117323 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/9480abea-a1f1-4416-880b-8fb72fd8716b-cert") pod "frr-k8s-webhook-server-64bf5d555-jn69p" (UID: "9480abea-a1f1-4416-880b-8fb72fd8716b") : secret "frr-k8s-webhook-server-cert" not found Oct 03 13:45:15 crc kubenswrapper[4861]: E1003 13:45:15.122246 4861 secret.go:188] Couldn't get secret metallb-system/frr-k8s-certs-secret: secret "frr-k8s-certs-secret" not found Oct 03 13:45:15 crc kubenswrapper[4861]: I1003 13:45:15.122255 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/f3324863-98d8-44d0-91dc-4bb8dc243a8f-metrics\") pod \"frr-k8s-xxhsl\" (UID: \"f3324863-98d8-44d0-91dc-4bb8dc243a8f\") " pod="metallb-system/frr-k8s-xxhsl" Oct 03 13:45:15 crc kubenswrapper[4861]: E1003 13:45:15.122293 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f3324863-98d8-44d0-91dc-4bb8dc243a8f-metrics-certs podName:f3324863-98d8-44d0-91dc-4bb8dc243a8f nodeName:}" failed. No retries permitted until 2025-10-03 13:45:15.6222785 +0000 UTC m=+829.620263547 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/f3324863-98d8-44d0-91dc-4bb8dc243a8f-metrics-certs") pod "frr-k8s-xxhsl" (UID: "f3324863-98d8-44d0-91dc-4bb8dc243a8f") : secret "frr-k8s-certs-secret" not found Oct 03 13:45:15 crc kubenswrapper[4861]: I1003 13:45:15.122349 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/f3324863-98d8-44d0-91dc-4bb8dc243a8f-frr-conf\") pod \"frr-k8s-xxhsl\" (UID: \"f3324863-98d8-44d0-91dc-4bb8dc243a8f\") " pod="metallb-system/frr-k8s-xxhsl" Oct 03 13:45:15 crc kubenswrapper[4861]: I1003 13:45:15.122454 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/f3324863-98d8-44d0-91dc-4bb8dc243a8f-reloader\") pod \"frr-k8s-xxhsl\" (UID: \"f3324863-98d8-44d0-91dc-4bb8dc243a8f\") " pod="metallb-system/frr-k8s-xxhsl" Oct 03 13:45:15 crc kubenswrapper[4861]: I1003 13:45:15.122765 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/f3324863-98d8-44d0-91dc-4bb8dc243a8f-frr-startup\") pod \"frr-k8s-xxhsl\" (UID: \"f3324863-98d8-44d0-91dc-4bb8dc243a8f\") " pod="metallb-system/frr-k8s-xxhsl" Oct 03 13:45:15 crc kubenswrapper[4861]: I1003 13:45:15.130949 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2" Oct 03 13:45:15 crc kubenswrapper[4861]: I1003 13:45:15.130949 4861 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret" Oct 03 13:45:15 crc kubenswrapper[4861]: I1003 13:45:15.142731 4861 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret" Oct 03 13:45:15 crc kubenswrapper[4861]: I1003 13:45:15.142731 4861 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist" Oct 03 13:45:15 crc kubenswrapper[4861]: I1003 13:45:15.142878 4861 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-qcns5" Oct 03 13:45:15 crc kubenswrapper[4861]: I1003 13:45:15.174703 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-68d546b9d8-cm757"] Oct 03 13:45:15 crc kubenswrapper[4861]: I1003 13:45:15.183132 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mvxrd\" (UniqueName: \"kubernetes.io/projected/f3324863-98d8-44d0-91dc-4bb8dc243a8f-kube-api-access-mvxrd\") pod \"frr-k8s-xxhsl\" (UID: \"f3324863-98d8-44d0-91dc-4bb8dc243a8f\") " pod="metallb-system/frr-k8s-xxhsl" Oct 03 13:45:15 crc kubenswrapper[4861]: I1003 13:45:15.183913 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-glvgc\" (UniqueName: \"kubernetes.io/projected/9480abea-a1f1-4416-880b-8fb72fd8716b-kube-api-access-glvgc\") pod \"frr-k8s-webhook-server-64bf5d555-jn69p\" (UID: \"9480abea-a1f1-4416-880b-8fb72fd8716b\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-jn69p" Oct 03 13:45:15 crc kubenswrapper[4861]: I1003 13:45:15.223827 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6d48b95e-8866-4456-95c0-4c3b728f6f93-metrics-certs\") pod \"speaker-vdx27\" (UID: \"6d48b95e-8866-4456-95c0-4c3b728f6f93\") " pod="metallb-system/speaker-vdx27" Oct 03 13:45:15 crc kubenswrapper[4861]: I1003 13:45:15.223871 4861 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fvgzb\" (UniqueName: \"kubernetes.io/projected/6d48b95e-8866-4456-95c0-4c3b728f6f93-kube-api-access-fvgzb\") pod \"speaker-vdx27\" (UID: \"6d48b95e-8866-4456-95c0-4c3b728f6f93\") " pod="metallb-system/speaker-vdx27" Oct 03 13:45:15 crc kubenswrapper[4861]: I1003 13:45:15.223901 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/6d48b95e-8866-4456-95c0-4c3b728f6f93-metallb-excludel2\") pod \"speaker-vdx27\" (UID: \"6d48b95e-8866-4456-95c0-4c3b728f6f93\") " pod="metallb-system/speaker-vdx27" Oct 03 13:45:15 crc kubenswrapper[4861]: I1003 13:45:15.223927 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d2fpl\" (UniqueName: \"kubernetes.io/projected/6aca0805-4feb-4b13-8b46-f41858176496-kube-api-access-d2fpl\") pod \"controller-68d546b9d8-cm757\" (UID: \"6aca0805-4feb-4b13-8b46-f41858176496\") " pod="metallb-system/controller-68d546b9d8-cm757" Oct 03 13:45:15 crc kubenswrapper[4861]: I1003 13:45:15.223960 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/6d48b95e-8866-4456-95c0-4c3b728f6f93-memberlist\") pod \"speaker-vdx27\" (UID: \"6d48b95e-8866-4456-95c0-4c3b728f6f93\") " pod="metallb-system/speaker-vdx27" Oct 03 13:45:15 crc kubenswrapper[4861]: I1003 13:45:15.223989 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/6aca0805-4feb-4b13-8b46-f41858176496-cert\") pod \"controller-68d546b9d8-cm757\" (UID: \"6aca0805-4feb-4b13-8b46-f41858176496\") " pod="metallb-system/controller-68d546b9d8-cm757" Oct 03 13:45:15 crc kubenswrapper[4861]: I1003 13:45:15.224040 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6aca0805-4feb-4b13-8b46-f41858176496-metrics-certs\") pod \"controller-68d546b9d8-cm757\" (UID: \"6aca0805-4feb-4b13-8b46-f41858176496\") " pod="metallb-system/controller-68d546b9d8-cm757" Oct 03 13:45:15 crc kubenswrapper[4861]: I1003 13:45:15.325468 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6aca0805-4feb-4b13-8b46-f41858176496-metrics-certs\") pod \"controller-68d546b9d8-cm757\" (UID: \"6aca0805-4feb-4b13-8b46-f41858176496\") " pod="metallb-system/controller-68d546b9d8-cm757" Oct 03 13:45:15 crc kubenswrapper[4861]: I1003 13:45:15.325540 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6d48b95e-8866-4456-95c0-4c3b728f6f93-metrics-certs\") pod \"speaker-vdx27\" (UID: \"6d48b95e-8866-4456-95c0-4c3b728f6f93\") " pod="metallb-system/speaker-vdx27" Oct 03 13:45:15 crc kubenswrapper[4861]: I1003 13:45:15.325568 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fvgzb\" (UniqueName: \"kubernetes.io/projected/6d48b95e-8866-4456-95c0-4c3b728f6f93-kube-api-access-fvgzb\") pod \"speaker-vdx27\" (UID: \"6d48b95e-8866-4456-95c0-4c3b728f6f93\") " pod="metallb-system/speaker-vdx27" Oct 03 13:45:15 crc kubenswrapper[4861]: I1003 13:45:15.325594 4861 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/6d48b95e-8866-4456-95c0-4c3b728f6f93-metallb-excludel2\") pod \"speaker-vdx27\" (UID: \"6d48b95e-8866-4456-95c0-4c3b728f6f93\") " pod="metallb-system/speaker-vdx27" Oct 03 13:45:15 crc kubenswrapper[4861]: E1003 13:45:15.325662 4861 secret.go:188] Couldn't get secret metallb-system/controller-certs-secret: secret "controller-certs-secret" not found Oct 03 13:45:15 crc kubenswrapper[4861]: E1003 13:45:15.325724 4861 secret.go:188] Couldn't get secret metallb-system/speaker-certs-secret: secret "speaker-certs-secret" not found Oct 03 13:45:15 crc kubenswrapper[4861]: E1003 13:45:15.325739 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6aca0805-4feb-4b13-8b46-f41858176496-metrics-certs podName:6aca0805-4feb-4b13-8b46-f41858176496 nodeName:}" failed. No retries permitted until 2025-10-03 13:45:15.825719858 +0000 UTC m=+829.823704945 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/6aca0805-4feb-4b13-8b46-f41858176496-metrics-certs") pod "controller-68d546b9d8-cm757" (UID: "6aca0805-4feb-4b13-8b46-f41858176496") : secret "controller-certs-secret" not found Oct 03 13:45:15 crc kubenswrapper[4861]: E1003 13:45:15.325772 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6d48b95e-8866-4456-95c0-4c3b728f6f93-metrics-certs podName:6d48b95e-8866-4456-95c0-4c3b728f6f93 nodeName:}" failed. No retries permitted until 2025-10-03 13:45:15.825758039 +0000 UTC m=+829.823743086 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/6d48b95e-8866-4456-95c0-4c3b728f6f93-metrics-certs") pod "speaker-vdx27" (UID: "6d48b95e-8866-4456-95c0-4c3b728f6f93") : secret "speaker-certs-secret" not found Oct 03 13:45:15 crc kubenswrapper[4861]: I1003 13:45:15.325618 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d2fpl\" (UniqueName: \"kubernetes.io/projected/6aca0805-4feb-4b13-8b46-f41858176496-kube-api-access-d2fpl\") pod \"controller-68d546b9d8-cm757\" (UID: \"6aca0805-4feb-4b13-8b46-f41858176496\") " pod="metallb-system/controller-68d546b9d8-cm757" Oct 03 13:45:15 crc kubenswrapper[4861]: I1003 13:45:15.325838 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/6d48b95e-8866-4456-95c0-4c3b728f6f93-memberlist\") pod \"speaker-vdx27\" (UID: \"6d48b95e-8866-4456-95c0-4c3b728f6f93\") " pod="metallb-system/speaker-vdx27" Oct 03 13:45:15 crc kubenswrapper[4861]: I1003 13:45:15.325874 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/6aca0805-4feb-4b13-8b46-f41858176496-cert\") pod \"controller-68d546b9d8-cm757\" (UID: \"6aca0805-4feb-4b13-8b46-f41858176496\") " pod="metallb-system/controller-68d546b9d8-cm757" Oct 03 13:45:15 crc kubenswrapper[4861]: E1003 13:45:15.325966 4861 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Oct 03 13:45:15 crc kubenswrapper[4861]: E1003 13:45:15.326054 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6d48b95e-8866-4456-95c0-4c3b728f6f93-memberlist podName:6d48b95e-8866-4456-95c0-4c3b728f6f93 nodeName:}" failed. 
No retries permitted until 2025-10-03 13:45:15.826033287 +0000 UTC m=+829.824018334 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/6d48b95e-8866-4456-95c0-4c3b728f6f93-memberlist") pod "speaker-vdx27" (UID: "6d48b95e-8866-4456-95c0-4c3b728f6f93") : secret "metallb-memberlist" not found
Oct 03 13:45:15 crc kubenswrapper[4861]: I1003 13:45:15.326495 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/6d48b95e-8866-4456-95c0-4c3b728f6f93-metallb-excludel2\") pod \"speaker-vdx27\" (UID: \"6d48b95e-8866-4456-95c0-4c3b728f6f93\") " pod="metallb-system/speaker-vdx27"
Oct 03 13:45:15 crc kubenswrapper[4861]: I1003 13:45:15.330697 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/6aca0805-4feb-4b13-8b46-f41858176496-cert\") pod \"controller-68d546b9d8-cm757\" (UID: \"6aca0805-4feb-4b13-8b46-f41858176496\") " pod="metallb-system/controller-68d546b9d8-cm757"
Oct 03 13:45:15 crc kubenswrapper[4861]: I1003 13:45:15.363553 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fvgzb\" (UniqueName: \"kubernetes.io/projected/6d48b95e-8866-4456-95c0-4c3b728f6f93-kube-api-access-fvgzb\") pod \"speaker-vdx27\" (UID: \"6d48b95e-8866-4456-95c0-4c3b728f6f93\") " pod="metallb-system/speaker-vdx27"
Oct 03 13:45:15 crc kubenswrapper[4861]: I1003 13:45:15.368057 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d2fpl\" (UniqueName: \"kubernetes.io/projected/6aca0805-4feb-4b13-8b46-f41858176496-kube-api-access-d2fpl\") pod \"controller-68d546b9d8-cm757\" (UID: \"6aca0805-4feb-4b13-8b46-f41858176496\") " pod="metallb-system/controller-68d546b9d8-cm757"
Oct 03 13:45:15 crc kubenswrapper[4861]: I1003 13:45:15.629021 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f3324863-98d8-44d0-91dc-4bb8dc243a8f-metrics-certs\") pod \"frr-k8s-xxhsl\" (UID: \"f3324863-98d8-44d0-91dc-4bb8dc243a8f\") " pod="metallb-system/frr-k8s-xxhsl"
Oct 03 13:45:15 crc kubenswrapper[4861]: I1003 13:45:15.629128 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/9480abea-a1f1-4416-880b-8fb72fd8716b-cert\") pod \"frr-k8s-webhook-server-64bf5d555-jn69p\" (UID: \"9480abea-a1f1-4416-880b-8fb72fd8716b\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-jn69p"
Oct 03 13:45:15 crc kubenswrapper[4861]: E1003 13:45:15.629224 4861 secret.go:188] Couldn't get secret metallb-system/frr-k8s-certs-secret: secret "frr-k8s-certs-secret" not found
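The failed secret mounts above were each scheduled for retry with durationBeforeRetry 500ms, and the repeat failures for the same volumes just below are scheduled with durationBeforeRetry 1s: the delay doubles on each consecutive failure of the same volume operation until the missing Secret finally appears and MountVolume.SetUp succeeds. A minimal Go sketch of that doubling policy (an illustration, not kubelet's actual code; only the 500ms and 1s values appear in this log, and the maximum delay used here is an assumption):

    package main

    import (
    	"fmt"
    	"time"
    )

    // retryDelay returns the wait before the nth consecutive failure of a
    // volume operation is retried: 500ms for the first failure, doubling on
    // each repeat, capped at an assumed maximum (not taken from this log).
    func retryDelay(failures int) time.Duration {
    	const maxDelay = 2*time.Minute + 2*time.Second // assumption
    	d := 500 * time.Millisecond
    	for i := 1; i < failures; i++ {
    		d *= 2
    		if d > maxDelay {
    			return maxDelay
    		}
    	}
    	return d
    }

    func main() {
    	for n := 1; n <= 4; n++ {
    		fmt.Printf("failure %d -> durationBeforeRetry %s\n", n, retryDelay(n))
    	}
    	// failure 1 -> durationBeforeRetry 500ms
    	// failure 2 -> durationBeforeRetry 1s   (as in the entries below)
    }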
Oct 03 13:45:15 crc kubenswrapper[4861]: E1003 13:45:15.629376 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f3324863-98d8-44d0-91dc-4bb8dc243a8f-metrics-certs podName:f3324863-98d8-44d0-91dc-4bb8dc243a8f nodeName:}" failed. No retries permitted until 2025-10-03 13:45:16.62935662 +0000 UTC m=+830.627341667 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/f3324863-98d8-44d0-91dc-4bb8dc243a8f-metrics-certs") pod "frr-k8s-xxhsl" (UID: "f3324863-98d8-44d0-91dc-4bb8dc243a8f") : secret "frr-k8s-certs-secret" not found
Oct 03 13:45:15 crc kubenswrapper[4861]: I1003 13:45:15.634898 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/9480abea-a1f1-4416-880b-8fb72fd8716b-cert\") pod \"frr-k8s-webhook-server-64bf5d555-jn69p\" (UID: \"9480abea-a1f1-4416-880b-8fb72fd8716b\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-jn69p"
Oct 03 13:45:15 crc kubenswrapper[4861]: I1003 13:45:15.831248 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6aca0805-4feb-4b13-8b46-f41858176496-metrics-certs\") pod \"controller-68d546b9d8-cm757\" (UID: \"6aca0805-4feb-4b13-8b46-f41858176496\") " pod="metallb-system/controller-68d546b9d8-cm757"
Oct 03 13:45:15 crc kubenswrapper[4861]: I1003 13:45:15.831327 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6d48b95e-8866-4456-95c0-4c3b728f6f93-metrics-certs\") pod \"speaker-vdx27\" (UID: \"6d48b95e-8866-4456-95c0-4c3b728f6f93\") " pod="metallb-system/speaker-vdx27"
Oct 03 13:45:15 crc kubenswrapper[4861]: I1003 13:45:15.831396 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/6d48b95e-8866-4456-95c0-4c3b728f6f93-memberlist\") pod \"speaker-vdx27\" (UID: \"6d48b95e-8866-4456-95c0-4c3b728f6f93\") " pod="metallb-system/speaker-vdx27"
Oct 03 13:45:15 crc kubenswrapper[4861]: E1003 13:45:15.831599 4861 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found
Oct 03 13:45:15 crc kubenswrapper[4861]: E1003 13:45:15.831662 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6d48b95e-8866-4456-95c0-4c3b728f6f93-memberlist podName:6d48b95e-8866-4456-95c0-4c3b728f6f93 nodeName:}" failed. No retries permitted until 2025-10-03 13:45:16.831644997 +0000 UTC m=+830.829630044 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/6d48b95e-8866-4456-95c0-4c3b728f6f93-memberlist") pod "speaker-vdx27" (UID: "6d48b95e-8866-4456-95c0-4c3b728f6f93") : secret "metallb-memberlist" not found
Oct 03 13:45:15 crc kubenswrapper[4861]: I1003 13:45:15.835688 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6aca0805-4feb-4b13-8b46-f41858176496-metrics-certs\") pod \"controller-68d546b9d8-cm757\" (UID: \"6aca0805-4feb-4b13-8b46-f41858176496\") " pod="metallb-system/controller-68d546b9d8-cm757"
Oct 03 13:45:15 crc kubenswrapper[4861]: I1003 13:45:15.836018 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6d48b95e-8866-4456-95c0-4c3b728f6f93-metrics-certs\") pod \"speaker-vdx27\" (UID: \"6d48b95e-8866-4456-95c0-4c3b728f6f93\") " pod="metallb-system/speaker-vdx27"
Oct 03 13:45:15 crc kubenswrapper[4861]: I1003 13:45:15.915904 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-jn69p" Oct 03 13:45:16 crc kubenswrapper[4861]: I1003 13:45:16.018616 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-68d546b9d8-cm757" Oct 03 13:45:16 crc kubenswrapper[4861]: I1003 13:45:16.120104 4861 generic.go:334] "Generic (PLEG): container finished" podID="1d830b12-c365-42ab-a9dc-583b4b3fabe9" containerID="657c1cc7b1e78990c9e2388736ce1ae9b18c7e1341bc592741574d6d02317b31" exitCode=0 Oct 03 13:45:16 crc kubenswrapper[4861]: I1003 13:45:16.120148 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pjp8c" event={"ID":"1d830b12-c365-42ab-a9dc-583b4b3fabe9","Type":"ContainerDied","Data":"657c1cc7b1e78990c9e2388736ce1ae9b18c7e1341bc592741574d6d02317b31"} Oct 03 13:45:16 crc kubenswrapper[4861]: I1003 13:45:16.156885 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-64bf5d555-jn69p"] Oct 03 13:45:16 crc kubenswrapper[4861]: W1003 13:45:16.167931 4861 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9480abea_a1f1_4416_880b_8fb72fd8716b.slice/crio-db14ded7e5fab54f2b655b065af1ada2467ded860740f53cbe6762dce9f07d24 WatchSource:0}: Error finding container db14ded7e5fab54f2b655b065af1ada2467ded860740f53cbe6762dce9f07d24: Status 404 returned error can't find the container with id db14ded7e5fab54f2b655b065af1ada2467ded860740f53cbe6762dce9f07d24 Oct 03 13:45:16 crc kubenswrapper[4861]: I1003 13:45:16.265997 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-68d546b9d8-cm757"] Oct 03 13:45:16 crc kubenswrapper[4861]: W1003 13:45:16.277144 4861 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6aca0805_4feb_4b13_8b46_f41858176496.slice/crio-b96c02da946e4eae30b6b43f0f9b9c1f0fa8a41a5f643feeb21051cab213e6cb WatchSource:0}: Error finding container b96c02da946e4eae30b6b43f0f9b9c1f0fa8a41a5f643feeb21051cab213e6cb: Status 404 returned error can't find the container with id b96c02da946e4eae30b6b43f0f9b9c1f0fa8a41a5f643feeb21051cab213e6cb Oct 03 13:45:16 crc kubenswrapper[4861]: I1003 13:45:16.655257 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f3324863-98d8-44d0-91dc-4bb8dc243a8f-metrics-certs\") pod \"frr-k8s-xxhsl\" (UID: \"f3324863-98d8-44d0-91dc-4bb8dc243a8f\") " pod="metallb-system/frr-k8s-xxhsl" Oct 03 13:45:16 crc kubenswrapper[4861]: I1003 13:45:16.664052 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f3324863-98d8-44d0-91dc-4bb8dc243a8f-metrics-certs\") pod \"frr-k8s-xxhsl\" (UID: \"f3324863-98d8-44d0-91dc-4bb8dc243a8f\") " pod="metallb-system/frr-k8s-xxhsl" Oct 03 13:45:16 crc kubenswrapper[4861]: I1003 13:45:16.753605 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-xxhsl" Oct 03 13:45:16 crc kubenswrapper[4861]: I1003 13:45:16.858611 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/6d48b95e-8866-4456-95c0-4c3b728f6f93-memberlist\") pod \"speaker-vdx27\" (UID: \"6d48b95e-8866-4456-95c0-4c3b728f6f93\") " pod="metallb-system/speaker-vdx27" Oct 03 13:45:16 crc kubenswrapper[4861]: I1003 13:45:16.861481 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/6d48b95e-8866-4456-95c0-4c3b728f6f93-memberlist\") pod \"speaker-vdx27\" (UID: \"6d48b95e-8866-4456-95c0-4c3b728f6f93\") " pod="metallb-system/speaker-vdx27" Oct 03 13:45:16 crc kubenswrapper[4861]: I1003 13:45:16.925361 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-vdx27" Oct 03 13:45:16 crc kubenswrapper[4861]: W1003 13:45:16.940487 4861 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6d48b95e_8866_4456_95c0_4c3b728f6f93.slice/crio-caa006e52f1551fdcbcb9472478f2cd0ff7ce4e09d090c772a2fcc82a7f39ae6 WatchSource:0}: Error finding container caa006e52f1551fdcbcb9472478f2cd0ff7ce4e09d090c772a2fcc82a7f39ae6: Status 404 returned error can't find the container with id caa006e52f1551fdcbcb9472478f2cd0ff7ce4e09d090c772a2fcc82a7f39ae6 Oct 03 13:45:17 crc kubenswrapper[4861]: I1003 13:45:17.150060 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-xxhsl" event={"ID":"f3324863-98d8-44d0-91dc-4bb8dc243a8f","Type":"ContainerStarted","Data":"a12a1c2ffe9f3645ecf5a52970dd8c9bf9a048380e4adaa5e59755e28bce60b4"} Oct 03 13:45:17 crc kubenswrapper[4861]: I1003 13:45:17.163401 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-jn69p" event={"ID":"9480abea-a1f1-4416-880b-8fb72fd8716b","Type":"ContainerStarted","Data":"db14ded7e5fab54f2b655b065af1ada2467ded860740f53cbe6762dce9f07d24"} Oct 03 13:45:17 crc kubenswrapper[4861]: I1003 13:45:17.167847 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-vdx27" event={"ID":"6d48b95e-8866-4456-95c0-4c3b728f6f93","Type":"ContainerStarted","Data":"caa006e52f1551fdcbcb9472478f2cd0ff7ce4e09d090c772a2fcc82a7f39ae6"} Oct 03 13:45:17 crc kubenswrapper[4861]: I1003 13:45:17.174660 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pjp8c" event={"ID":"1d830b12-c365-42ab-a9dc-583b4b3fabe9","Type":"ContainerStarted","Data":"4f8e6ea2f1f0a550d9a921eb3916b6f604701377010b59bb40f2991d87fbb305"} Oct 03 13:45:17 crc kubenswrapper[4861]: I1003 13:45:17.189304 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-68d546b9d8-cm757" event={"ID":"6aca0805-4feb-4b13-8b46-f41858176496","Type":"ContainerStarted","Data":"a99659f6fdc7166e4b2edb88b0591150ad90bfe79146ab87c8acde299722da63"} Oct 03 13:45:17 crc kubenswrapper[4861]: I1003 13:45:17.189346 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-68d546b9d8-cm757" event={"ID":"6aca0805-4feb-4b13-8b46-f41858176496","Type":"ContainerStarted","Data":"a7702f3a4720df111b947cde04e203876bc5e6dfa24135478faa2f99f8a8941d"} Oct 03 13:45:17 crc kubenswrapper[4861]: I1003 13:45:17.189356 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-68d546b9d8-cm757" 
event={"ID":"6aca0805-4feb-4b13-8b46-f41858176496","Type":"ContainerStarted","Data":"b96c02da946e4eae30b6b43f0f9b9c1f0fa8a41a5f643feeb21051cab213e6cb"} Oct 03 13:45:17 crc kubenswrapper[4861]: I1003 13:45:17.189454 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-68d546b9d8-cm757" Oct 03 13:45:17 crc kubenswrapper[4861]: I1003 13:45:17.218491 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-pjp8c" podStartSLOduration=2.455949171 podStartE2EDuration="5.218470419s" podCreationTimestamp="2025-10-03 13:45:12 +0000 UTC" firstStartedPulling="2025-10-03 13:45:14.097016615 +0000 UTC m=+828.095001662" lastFinishedPulling="2025-10-03 13:45:16.859537863 +0000 UTC m=+830.857522910" observedRunningTime="2025-10-03 13:45:17.212404288 +0000 UTC m=+831.210389325" watchObservedRunningTime="2025-10-03 13:45:17.218470419 +0000 UTC m=+831.216455466" Oct 03 13:45:17 crc kubenswrapper[4861]: I1003 13:45:17.251211 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-68d546b9d8-cm757" podStartSLOduration=2.251195133 podStartE2EDuration="2.251195133s" podCreationTimestamp="2025-10-03 13:45:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:45:17.249404395 +0000 UTC m=+831.247389452" watchObservedRunningTime="2025-10-03 13:45:17.251195133 +0000 UTC m=+831.249180180" Oct 03 13:45:18 crc kubenswrapper[4861]: I1003 13:45:18.244547 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-vdx27" event={"ID":"6d48b95e-8866-4456-95c0-4c3b728f6f93","Type":"ContainerStarted","Data":"439e2b16a172d4d1ffc4e162b2206cbd73cd7114a8a07350e79b6905a1b51025"} Oct 03 13:45:18 crc kubenswrapper[4861]: I1003 13:45:18.244859 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-vdx27" event={"ID":"6d48b95e-8866-4456-95c0-4c3b728f6f93","Type":"ContainerStarted","Data":"cceefe992080350bda491d42457d4f2f71a6d2a0fe294830ed1e4db6a17df6ea"} Oct 03 13:45:18 crc kubenswrapper[4861]: I1003 13:45:18.246061 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-vdx27" Oct 03 13:45:18 crc kubenswrapper[4861]: I1003 13:45:18.280620 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-vdx27" podStartSLOduration=3.278214005 podStartE2EDuration="3.278214005s" podCreationTimestamp="2025-10-03 13:45:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:45:18.274557277 +0000 UTC m=+832.272542324" watchObservedRunningTime="2025-10-03 13:45:18.278214005 +0000 UTC m=+832.276199052" Oct 03 13:45:19 crc kubenswrapper[4861]: I1003 13:45:19.570410 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-7lvf7" Oct 03 13:45:19 crc kubenswrapper[4861]: I1003 13:45:19.570751 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-7lvf7" Oct 03 13:45:19 crc kubenswrapper[4861]: I1003 13:45:19.615767 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-7lvf7" Oct 03 13:45:20 crc kubenswrapper[4861]: I1003 13:45:20.325571 4861 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="ready" pod="openshift-marketplace/community-operators-7lvf7" Oct 03 13:45:20 crc kubenswrapper[4861]: I1003 13:45:20.825241 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-7lvf7"] Oct 03 13:45:22 crc kubenswrapper[4861]: I1003 13:45:22.280402 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-7lvf7" podUID="0c547d33-c876-4c98-b1ce-a61325cedd45" containerName="registry-server" containerID="cri-o://5f91ad9b20d752875b9c2a97e9b8911905dbe3c61d0872c9eedb3a4e389bd737" gracePeriod=2 Oct 03 13:45:22 crc kubenswrapper[4861]: I1003 13:45:22.553710 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-pjp8c" Oct 03 13:45:22 crc kubenswrapper[4861]: I1003 13:45:22.554061 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-pjp8c" Oct 03 13:45:22 crc kubenswrapper[4861]: I1003 13:45:22.594799 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-pjp8c" Oct 03 13:45:23 crc kubenswrapper[4861]: I1003 13:45:23.236433 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-4k4l7"] Oct 03 13:45:23 crc kubenswrapper[4861]: I1003 13:45:23.237931 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4k4l7" Oct 03 13:45:23 crc kubenswrapper[4861]: I1003 13:45:23.259058 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-4k4l7"] Oct 03 13:45:23 crc kubenswrapper[4861]: I1003 13:45:23.291968 4861 generic.go:334] "Generic (PLEG): container finished" podID="0c547d33-c876-4c98-b1ce-a61325cedd45" containerID="5f91ad9b20d752875b9c2a97e9b8911905dbe3c61d0872c9eedb3a4e389bd737" exitCode=0 Oct 03 13:45:23 crc kubenswrapper[4861]: I1003 13:45:23.292054 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7lvf7" event={"ID":"0c547d33-c876-4c98-b1ce-a61325cedd45","Type":"ContainerDied","Data":"5f91ad9b20d752875b9c2a97e9b8911905dbe3c61d0872c9eedb3a4e389bd737"} Oct 03 13:45:23 crc kubenswrapper[4861]: I1003 13:45:23.340976 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-pjp8c" Oct 03 13:45:23 crc kubenswrapper[4861]: I1003 13:45:23.366507 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/efc1e6ec-91b9-4100-898f-4aeae1de5f44-catalog-content\") pod \"certified-operators-4k4l7\" (UID: \"efc1e6ec-91b9-4100-898f-4aeae1de5f44\") " pod="openshift-marketplace/certified-operators-4k4l7" Oct 03 13:45:23 crc kubenswrapper[4861]: I1003 13:45:23.366577 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qljcj\" (UniqueName: \"kubernetes.io/projected/efc1e6ec-91b9-4100-898f-4aeae1de5f44-kube-api-access-qljcj\") pod \"certified-operators-4k4l7\" (UID: \"efc1e6ec-91b9-4100-898f-4aeae1de5f44\") " pod="openshift-marketplace/certified-operators-4k4l7" Oct 03 13:45:23 crc kubenswrapper[4861]: I1003 13:45:23.366668 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/efc1e6ec-91b9-4100-898f-4aeae1de5f44-utilities\") pod \"certified-operators-4k4l7\" (UID: \"efc1e6ec-91b9-4100-898f-4aeae1de5f44\") " pod="openshift-marketplace/certified-operators-4k4l7" Oct 03 13:45:23 crc kubenswrapper[4861]: I1003 13:45:23.468357 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/efc1e6ec-91b9-4100-898f-4aeae1de5f44-utilities\") pod \"certified-operators-4k4l7\" (UID: \"efc1e6ec-91b9-4100-898f-4aeae1de5f44\") " pod="openshift-marketplace/certified-operators-4k4l7" Oct 03 13:45:23 crc kubenswrapper[4861]: I1003 13:45:23.468438 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/efc1e6ec-91b9-4100-898f-4aeae1de5f44-catalog-content\") pod \"certified-operators-4k4l7\" (UID: \"efc1e6ec-91b9-4100-898f-4aeae1de5f44\") " pod="openshift-marketplace/certified-operators-4k4l7" Oct 03 13:45:23 crc kubenswrapper[4861]: I1003 13:45:23.468472 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qljcj\" (UniqueName: \"kubernetes.io/projected/efc1e6ec-91b9-4100-898f-4aeae1de5f44-kube-api-access-qljcj\") pod \"certified-operators-4k4l7\" (UID: \"efc1e6ec-91b9-4100-898f-4aeae1de5f44\") " pod="openshift-marketplace/certified-operators-4k4l7" Oct 03 13:45:23 crc kubenswrapper[4861]: I1003 13:45:23.468992 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/efc1e6ec-91b9-4100-898f-4aeae1de5f44-utilities\") pod \"certified-operators-4k4l7\" (UID: \"efc1e6ec-91b9-4100-898f-4aeae1de5f44\") " pod="openshift-marketplace/certified-operators-4k4l7" Oct 03 13:45:23 crc kubenswrapper[4861]: I1003 13:45:23.469044 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/efc1e6ec-91b9-4100-898f-4aeae1de5f44-catalog-content\") pod \"certified-operators-4k4l7\" (UID: \"efc1e6ec-91b9-4100-898f-4aeae1de5f44\") " pod="openshift-marketplace/certified-operators-4k4l7" Oct 03 13:45:23 crc kubenswrapper[4861]: I1003 13:45:23.491964 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qljcj\" (UniqueName: \"kubernetes.io/projected/efc1e6ec-91b9-4100-898f-4aeae1de5f44-kube-api-access-qljcj\") pod \"certified-operators-4k4l7\" (UID: \"efc1e6ec-91b9-4100-898f-4aeae1de5f44\") " pod="openshift-marketplace/certified-operators-4k4l7" Oct 03 13:45:23 crc kubenswrapper[4861]: I1003 13:45:23.581611 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4k4l7" Oct 03 13:45:24 crc kubenswrapper[4861]: I1003 13:45:24.692001 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-7lvf7" Oct 03 13:45:24 crc kubenswrapper[4861]: I1003 13:45:24.748147 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-4k4l7"] Oct 03 13:45:24 crc kubenswrapper[4861]: I1003 13:45:24.784467 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w846c\" (UniqueName: \"kubernetes.io/projected/0c547d33-c876-4c98-b1ce-a61325cedd45-kube-api-access-w846c\") pod \"0c547d33-c876-4c98-b1ce-a61325cedd45\" (UID: \"0c547d33-c876-4c98-b1ce-a61325cedd45\") " Oct 03 13:45:24 crc kubenswrapper[4861]: I1003 13:45:24.784548 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0c547d33-c876-4c98-b1ce-a61325cedd45-catalog-content\") pod \"0c547d33-c876-4c98-b1ce-a61325cedd45\" (UID: \"0c547d33-c876-4c98-b1ce-a61325cedd45\") " Oct 03 13:45:24 crc kubenswrapper[4861]: I1003 13:45:24.784648 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0c547d33-c876-4c98-b1ce-a61325cedd45-utilities\") pod \"0c547d33-c876-4c98-b1ce-a61325cedd45\" (UID: \"0c547d33-c876-4c98-b1ce-a61325cedd45\") " Oct 03 13:45:24 crc kubenswrapper[4861]: I1003 13:45:24.786018 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0c547d33-c876-4c98-b1ce-a61325cedd45-utilities" (OuterVolumeSpecName: "utilities") pod "0c547d33-c876-4c98-b1ce-a61325cedd45" (UID: "0c547d33-c876-4c98-b1ce-a61325cedd45"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:45:24 crc kubenswrapper[4861]: I1003 13:45:24.795121 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0c547d33-c876-4c98-b1ce-a61325cedd45-kube-api-access-w846c" (OuterVolumeSpecName: "kube-api-access-w846c") pod "0c547d33-c876-4c98-b1ce-a61325cedd45" (UID: "0c547d33-c876-4c98-b1ce-a61325cedd45"). InnerVolumeSpecName "kube-api-access-w846c". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:45:24 crc kubenswrapper[4861]: I1003 13:45:24.838542 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0c547d33-c876-4c98-b1ce-a61325cedd45-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0c547d33-c876-4c98-b1ce-a61325cedd45" (UID: "0c547d33-c876-4c98-b1ce-a61325cedd45"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:45:24 crc kubenswrapper[4861]: I1003 13:45:24.886154 4861 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0c547d33-c876-4c98-b1ce-a61325cedd45-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 13:45:24 crc kubenswrapper[4861]: I1003 13:45:24.886202 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w846c\" (UniqueName: \"kubernetes.io/projected/0c547d33-c876-4c98-b1ce-a61325cedd45-kube-api-access-w846c\") on node \"crc\" DevicePath \"\"" Oct 03 13:45:24 crc kubenswrapper[4861]: I1003 13:45:24.886216 4861 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0c547d33-c876-4c98-b1ce-a61325cedd45-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 13:45:25 crc kubenswrapper[4861]: I1003 13:45:25.024873 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-pjp8c"] Oct 03 13:45:25 crc kubenswrapper[4861]: I1003 13:45:25.309606 4861 generic.go:334] "Generic (PLEG): container finished" podID="f3324863-98d8-44d0-91dc-4bb8dc243a8f" containerID="3c9de2d6d7a1d6426127d1965b505fb6cb4dc86033bb25a4cf0b8d75baa9203d" exitCode=0 Oct 03 13:45:25 crc kubenswrapper[4861]: I1003 13:45:25.309650 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-xxhsl" event={"ID":"f3324863-98d8-44d0-91dc-4bb8dc243a8f","Type":"ContainerDied","Data":"3c9de2d6d7a1d6426127d1965b505fb6cb4dc86033bb25a4cf0b8d75baa9203d"} Oct 03 13:45:25 crc kubenswrapper[4861]: I1003 13:45:25.312554 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7lvf7" event={"ID":"0c547d33-c876-4c98-b1ce-a61325cedd45","Type":"ContainerDied","Data":"f1e83a9d4ad8508d486e48f2cfd1df63683f655ce79185b8991a89c9ee31dfc9"} Oct 03 13:45:25 crc kubenswrapper[4861]: I1003 13:45:25.312569 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-7lvf7" Oct 03 13:45:25 crc kubenswrapper[4861]: I1003 13:45:25.312591 4861 scope.go:117] "RemoveContainer" containerID="5f91ad9b20d752875b9c2a97e9b8911905dbe3c61d0872c9eedb3a4e389bd737" Oct 03 13:45:25 crc kubenswrapper[4861]: I1003 13:45:25.316028 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-jn69p" event={"ID":"9480abea-a1f1-4416-880b-8fb72fd8716b","Type":"ContainerStarted","Data":"f50664ff2eea0aad9411c232a1c9ce18bfc9464495ba4c371d0cda5e973ab457"} Oct 03 13:45:25 crc kubenswrapper[4861]: I1003 13:45:25.316425 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-jn69p" Oct 03 13:45:25 crc kubenswrapper[4861]: I1003 13:45:25.318209 4861 generic.go:334] "Generic (PLEG): container finished" podID="efc1e6ec-91b9-4100-898f-4aeae1de5f44" containerID="07d7caf4219d28d4255ea1b9fe67c1cb9755f4e2ffc0de05d8be6ed8aed3c932" exitCode=0 Oct 03 13:45:25 crc kubenswrapper[4861]: I1003 13:45:25.318579 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-pjp8c" podUID="1d830b12-c365-42ab-a9dc-583b4b3fabe9" containerName="registry-server" containerID="cri-o://4f8e6ea2f1f0a550d9a921eb3916b6f604701377010b59bb40f2991d87fbb305" gracePeriod=2 Oct 03 13:45:25 crc kubenswrapper[4861]: I1003 13:45:25.318655 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4k4l7" event={"ID":"efc1e6ec-91b9-4100-898f-4aeae1de5f44","Type":"ContainerDied","Data":"07d7caf4219d28d4255ea1b9fe67c1cb9755f4e2ffc0de05d8be6ed8aed3c932"} Oct 03 13:45:25 crc kubenswrapper[4861]: I1003 13:45:25.318688 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4k4l7" event={"ID":"efc1e6ec-91b9-4100-898f-4aeae1de5f44","Type":"ContainerStarted","Data":"4a7b80e50c034a22fba4823da752a12bbe9f1a79b2aa898e425b55be8652fadc"} Oct 03 13:45:25 crc kubenswrapper[4861]: I1003 13:45:25.345674 4861 scope.go:117] "RemoveContainer" containerID="5c4283ee99b102c851265075b1f36a68f1bb7afa9b621d8219a60c1c65e1685a" Oct 03 13:45:25 crc kubenswrapper[4861]: I1003 13:45:25.394579 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-jn69p" podStartSLOduration=3.014252551 podStartE2EDuration="11.394559639s" podCreationTimestamp="2025-10-03 13:45:14 +0000 UTC" firstStartedPulling="2025-10-03 13:45:16.169670646 +0000 UTC m=+830.167655683" lastFinishedPulling="2025-10-03 13:45:24.549977724 +0000 UTC m=+838.547962771" observedRunningTime="2025-10-03 13:45:25.380694559 +0000 UTC m=+839.378679606" watchObservedRunningTime="2025-10-03 13:45:25.394559639 +0000 UTC m=+839.392544686" Oct 03 13:45:25 crc kubenswrapper[4861]: I1003 13:45:25.395189 4861 scope.go:117] "RemoveContainer" containerID="30a549db6728b13f808ac91415818c9805ecb9c4f2aa39cb1f6fe254cf69b313" Oct 03 13:45:25 crc kubenswrapper[4861]: I1003 13:45:25.400252 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-7lvf7"] Oct 03 13:45:25 crc kubenswrapper[4861]: I1003 13:45:25.405764 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-7lvf7"] Oct 03 13:45:25 crc kubenswrapper[4861]: I1003 13:45:25.825035 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pjp8c" Oct 03 13:45:26 crc kubenswrapper[4861]: I1003 13:45:26.006219 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d830b12-c365-42ab-a9dc-583b4b3fabe9-utilities\") pod \"1d830b12-c365-42ab-a9dc-583b4b3fabe9\" (UID: \"1d830b12-c365-42ab-a9dc-583b4b3fabe9\") " Oct 03 13:45:26 crc kubenswrapper[4861]: I1003 13:45:26.006337 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4nx8\" (UniqueName: \"kubernetes.io/projected/1d830b12-c365-42ab-a9dc-583b4b3fabe9-kube-api-access-s4nx8\") pod \"1d830b12-c365-42ab-a9dc-583b4b3fabe9\" (UID: \"1d830b12-c365-42ab-a9dc-583b4b3fabe9\") " Oct 03 13:45:26 crc kubenswrapper[4861]: I1003 13:45:26.006481 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d830b12-c365-42ab-a9dc-583b4b3fabe9-catalog-content\") pod \"1d830b12-c365-42ab-a9dc-583b4b3fabe9\" (UID: \"1d830b12-c365-42ab-a9dc-583b4b3fabe9\") " Oct 03 13:45:26 crc kubenswrapper[4861]: I1003 13:45:26.007151 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d830b12-c365-42ab-a9dc-583b4b3fabe9-utilities" (OuterVolumeSpecName: "utilities") pod "1d830b12-c365-42ab-a9dc-583b4b3fabe9" (UID: "1d830b12-c365-42ab-a9dc-583b4b3fabe9"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:45:26 crc kubenswrapper[4861]: I1003 13:45:26.023560 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d830b12-c365-42ab-a9dc-583b4b3fabe9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d830b12-c365-42ab-a9dc-583b4b3fabe9" (UID: "1d830b12-c365-42ab-a9dc-583b4b3fabe9"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:45:26 crc kubenswrapper[4861]: I1003 13:45:26.023934 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d830b12-c365-42ab-a9dc-583b4b3fabe9-kube-api-access-s4nx8" (OuterVolumeSpecName: "kube-api-access-s4nx8") pod "1d830b12-c365-42ab-a9dc-583b4b3fabe9" (UID: "1d830b12-c365-42ab-a9dc-583b4b3fabe9"). InnerVolumeSpecName "kube-api-access-s4nx8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:45:26 crc kubenswrapper[4861]: I1003 13:45:26.026291 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-68d546b9d8-cm757" Oct 03 13:45:26 crc kubenswrapper[4861]: I1003 13:45:26.108267 4861 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d830b12-c365-42ab-a9dc-583b4b3fabe9-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 13:45:26 crc kubenswrapper[4861]: I1003 13:45:26.108302 4861 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d830b12-c365-42ab-a9dc-583b4b3fabe9-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 13:45:26 crc kubenswrapper[4861]: I1003 13:45:26.108312 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4nx8\" (UniqueName: \"kubernetes.io/projected/1d830b12-c365-42ab-a9dc-583b4b3fabe9-kube-api-access-s4nx8\") on node \"crc\" DevicePath \"\"" Oct 03 13:45:26 crc kubenswrapper[4861]: I1003 13:45:26.352394 4861 generic.go:334] "Generic (PLEG): container finished" podID="f3324863-98d8-44d0-91dc-4bb8dc243a8f" containerID="0380fdafab7f57f54bf58067832049e32b472ecdc238a29b5f45bf7352c3721c" exitCode=0 Oct 03 13:45:26 crc kubenswrapper[4861]: I1003 13:45:26.352525 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-xxhsl" event={"ID":"f3324863-98d8-44d0-91dc-4bb8dc243a8f","Type":"ContainerDied","Data":"0380fdafab7f57f54bf58067832049e32b472ecdc238a29b5f45bf7352c3721c"} Oct 03 13:45:26 crc kubenswrapper[4861]: I1003 13:45:26.359935 4861 generic.go:334] "Generic (PLEG): container finished" podID="1d830b12-c365-42ab-a9dc-583b4b3fabe9" containerID="4f8e6ea2f1f0a550d9a921eb3916b6f604701377010b59bb40f2991d87fbb305" exitCode=0 Oct 03 13:45:26 crc kubenswrapper[4861]: I1003 13:45:26.360084 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pjp8c" Oct 03 13:45:26 crc kubenswrapper[4861]: I1003 13:45:26.360169 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pjp8c" event={"ID":"1d830b12-c365-42ab-a9dc-583b4b3fabe9","Type":"ContainerDied","Data":"4f8e6ea2f1f0a550d9a921eb3916b6f604701377010b59bb40f2991d87fbb305"} Oct 03 13:45:26 crc kubenswrapper[4861]: I1003 13:45:26.360212 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pjp8c" event={"ID":"1d830b12-c365-42ab-a9dc-583b4b3fabe9","Type":"ContainerDied","Data":"de71a00c40e329efd3511f3ed5f0198e27af50c0a29dc55643efde6502003cef"} Oct 03 13:45:26 crc kubenswrapper[4861]: I1003 13:45:26.360246 4861 scope.go:117] "RemoveContainer" containerID="4f8e6ea2f1f0a550d9a921eb3916b6f604701377010b59bb40f2991d87fbb305" Oct 03 13:45:26 crc kubenswrapper[4861]: I1003 13:45:26.384482 4861 scope.go:117] "RemoveContainer" containerID="657c1cc7b1e78990c9e2388736ce1ae9b18c7e1341bc592741574d6d02317b31" Oct 03 13:45:26 crc kubenswrapper[4861]: I1003 13:45:26.399674 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-pjp8c"] Oct 03 13:45:26 crc kubenswrapper[4861]: I1003 13:45:26.406629 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-pjp8c"] Oct 03 13:45:26 crc kubenswrapper[4861]: I1003 13:45:26.419409 4861 scope.go:117] "RemoveContainer" containerID="0af3c312686221d4e2d1065cd72d8bb25338a71199949c1499b9a40fc1eba8f0" Oct 03 13:45:26 crc kubenswrapper[4861]: I1003 13:45:26.441490 4861 scope.go:117] "RemoveContainer" containerID="4f8e6ea2f1f0a550d9a921eb3916b6f604701377010b59bb40f2991d87fbb305" Oct 03 13:45:26 crc kubenswrapper[4861]: E1003 13:45:26.442456 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4f8e6ea2f1f0a550d9a921eb3916b6f604701377010b59bb40f2991d87fbb305\": container with ID starting with 4f8e6ea2f1f0a550d9a921eb3916b6f604701377010b59bb40f2991d87fbb305 not found: ID does not exist" containerID="4f8e6ea2f1f0a550d9a921eb3916b6f604701377010b59bb40f2991d87fbb305" Oct 03 13:45:26 crc kubenswrapper[4861]: I1003 13:45:26.442552 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4f8e6ea2f1f0a550d9a921eb3916b6f604701377010b59bb40f2991d87fbb305"} err="failed to get container status \"4f8e6ea2f1f0a550d9a921eb3916b6f604701377010b59bb40f2991d87fbb305\": rpc error: code = NotFound desc = could not find container \"4f8e6ea2f1f0a550d9a921eb3916b6f604701377010b59bb40f2991d87fbb305\": container with ID starting with 4f8e6ea2f1f0a550d9a921eb3916b6f604701377010b59bb40f2991d87fbb305 not found: ID does not exist" Oct 03 13:45:26 crc kubenswrapper[4861]: I1003 13:45:26.442598 4861 scope.go:117] "RemoveContainer" containerID="657c1cc7b1e78990c9e2388736ce1ae9b18c7e1341bc592741574d6d02317b31" Oct 03 13:45:26 crc kubenswrapper[4861]: E1003 13:45:26.442974 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"657c1cc7b1e78990c9e2388736ce1ae9b18c7e1341bc592741574d6d02317b31\": container with ID starting with 657c1cc7b1e78990c9e2388736ce1ae9b18c7e1341bc592741574d6d02317b31 not found: ID does not exist" containerID="657c1cc7b1e78990c9e2388736ce1ae9b18c7e1341bc592741574d6d02317b31" Oct 03 13:45:26 crc kubenswrapper[4861]: I1003 13:45:26.443028 4861 
Oct 03 13:45:26 crc kubenswrapper[4861]: I1003 13:45:26.443028 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"657c1cc7b1e78990c9e2388736ce1ae9b18c7e1341bc592741574d6d02317b31"} err="failed to get container status \"657c1cc7b1e78990c9e2388736ce1ae9b18c7e1341bc592741574d6d02317b31\": rpc error: code = NotFound desc = could not find container \"657c1cc7b1e78990c9e2388736ce1ae9b18c7e1341bc592741574d6d02317b31\": container with ID starting with 657c1cc7b1e78990c9e2388736ce1ae9b18c7e1341bc592741574d6d02317b31 not found: ID does not exist"
Oct 03 13:45:26 crc kubenswrapper[4861]: I1003 13:45:26.443048 4861 scope.go:117] "RemoveContainer" containerID="0af3c312686221d4e2d1065cd72d8bb25338a71199949c1499b9a40fc1eba8f0"
Oct 03 13:45:26 crc kubenswrapper[4861]: E1003 13:45:26.443745 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0af3c312686221d4e2d1065cd72d8bb25338a71199949c1499b9a40fc1eba8f0\": container with ID starting with 0af3c312686221d4e2d1065cd72d8bb25338a71199949c1499b9a40fc1eba8f0 not found: ID does not exist" containerID="0af3c312686221d4e2d1065cd72d8bb25338a71199949c1499b9a40fc1eba8f0"
Oct 03 13:45:26 crc kubenswrapper[4861]: I1003 13:45:26.443798 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0af3c312686221d4e2d1065cd72d8bb25338a71199949c1499b9a40fc1eba8f0"} err="failed to get container status \"0af3c312686221d4e2d1065cd72d8bb25338a71199949c1499b9a40fc1eba8f0\": rpc error: code = NotFound desc = could not find container \"0af3c312686221d4e2d1065cd72d8bb25338a71199949c1499b9a40fc1eba8f0\": container with ID starting with 0af3c312686221d4e2d1065cd72d8bb25338a71199949c1499b9a40fc1eba8f0 not found: ID does not exist"
Oct 03 13:45:26 crc kubenswrapper[4861]: E1003 13:45:26.465491 4861 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1d830b12_c365_42ab_a9dc_583b4b3fabe9.slice/crio-de71a00c40e329efd3511f3ed5f0198e27af50c0a29dc55643efde6502003cef\": RecentStats: unable to find data in memory cache]"
Oct 03 13:45:26 crc kubenswrapper[4861]: I1003 13:45:26.688835 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0c547d33-c876-4c98-b1ce-a61325cedd45" path="/var/lib/kubelet/pods/0c547d33-c876-4c98-b1ce-a61325cedd45/volumes"
Oct 03 13:45:26 crc kubenswrapper[4861]: I1003 13:45:26.689699 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d830b12-c365-42ab-a9dc-583b4b3fabe9" path="/var/lib/kubelet/pods/1d830b12-c365-42ab-a9dc-583b4b3fabe9/volumes"
Oct 03 13:45:27 crc kubenswrapper[4861]: I1003 13:45:27.366783 4861 generic.go:334] "Generic (PLEG): container finished" podID="efc1e6ec-91b9-4100-898f-4aeae1de5f44" containerID="ec4c597e9d10c510007a51dd9662a90301e52eaa6930ce888e7b46cbd9001a0b" exitCode=0
Oct 03 13:45:27 crc kubenswrapper[4861]: I1003 13:45:27.366839 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4k4l7" event={"ID":"efc1e6ec-91b9-4100-898f-4aeae1de5f44","Type":"ContainerDied","Data":"ec4c597e9d10c510007a51dd9662a90301e52eaa6930ce888e7b46cbd9001a0b"}
Oct 03 13:45:27 crc kubenswrapper[4861]: I1003 13:45:27.368985 4861 generic.go:334] "Generic (PLEG): container finished" podID="f3324863-98d8-44d0-91dc-4bb8dc243a8f" containerID="b052c2f6ccb89a9fdfb6470771eaeeb4ed6347b23d2c2557a324a14ec56be11c" exitCode=0
Oct 03 13:45:27 crc kubenswrapper[4861]: I1003 
13:45:27.369011 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-xxhsl" event={"ID":"f3324863-98d8-44d0-91dc-4bb8dc243a8f","Type":"ContainerDied","Data":"b052c2f6ccb89a9fdfb6470771eaeeb4ed6347b23d2c2557a324a14ec56be11c"}
Oct 03 13:45:28 crc kubenswrapper[4861]: I1003 13:45:28.391532 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4k4l7" event={"ID":"efc1e6ec-91b9-4100-898f-4aeae1de5f44","Type":"ContainerStarted","Data":"a9a892817a5c95dcf9a3372d2c697eff58aadfdf66cec247d51becb3d0036d4a"}
Oct 03 13:45:28 crc kubenswrapper[4861]: I1003 13:45:28.408515 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-xxhsl" event={"ID":"f3324863-98d8-44d0-91dc-4bb8dc243a8f","Type":"ContainerStarted","Data":"2041ef3bcae3e3493ed57351aa96670771c7063637bf0c3806be9db42006afbb"}
Oct 03 13:45:28 crc kubenswrapper[4861]: I1003 13:45:28.408561 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-xxhsl" event={"ID":"f3324863-98d8-44d0-91dc-4bb8dc243a8f","Type":"ContainerStarted","Data":"0ae7e4369884746eeffdf111a5994fe36491977019a0c43ca0f95a03e6c46f9b"}
Oct 03 13:45:28 crc kubenswrapper[4861]: I1003 13:45:28.408575 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-xxhsl" event={"ID":"f3324863-98d8-44d0-91dc-4bb8dc243a8f","Type":"ContainerStarted","Data":"0f048c6b840c48e049bcaed8b15f483db556a3d1700d7532308db882b68af99b"}
Oct 03 13:45:28 crc kubenswrapper[4861]: I1003 13:45:28.408586 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-xxhsl" event={"ID":"f3324863-98d8-44d0-91dc-4bb8dc243a8f","Type":"ContainerStarted","Data":"857ec6bfcebddb1ecd01d87d7d7b8700eaaf435f9eab6b056e4a176d4463f673"}
Oct 03 13:45:28 crc kubenswrapper[4861]: I1003 13:45:28.408597 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-xxhsl" event={"ID":"f3324863-98d8-44d0-91dc-4bb8dc243a8f","Type":"ContainerStarted","Data":"e0f28c57122ad39eb2f6f03d56eec1115e5cf98e2463480ad4ac06af92e6263d"}
Oct 03 13:45:28 crc kubenswrapper[4861]: I1003 13:45:28.426686 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-4k4l7" podStartSLOduration=2.97364679 podStartE2EDuration="5.42667268s" podCreationTimestamp="2025-10-03 13:45:23 +0000 UTC" firstStartedPulling="2025-10-03 13:45:25.320662508 +0000 UTC m=+839.318647555" lastFinishedPulling="2025-10-03 13:45:27.773688398 +0000 UTC m=+841.771673445" observedRunningTime="2025-10-03 13:45:28.426261149 +0000 UTC m=+842.424246206" watchObservedRunningTime="2025-10-03 13:45:28.42667268 +0000 UTC m=+842.424657727"
Oct 03 13:45:29 crc kubenswrapper[4861]: I1003 13:45:29.421503 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-xxhsl" event={"ID":"f3324863-98d8-44d0-91dc-4bb8dc243a8f","Type":"ContainerStarted","Data":"dd46376e87b5fd5e758cd6d982725564a818addf82c33a47427d1995829c933a"}
Oct 03 13:45:29 crc kubenswrapper[4861]: I1003 13:45:29.442479 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-xxhsl" podStartSLOduration=7.7804728260000005 podStartE2EDuration="15.442459038s" podCreationTimestamp="2025-10-03 13:45:14 +0000 UTC" firstStartedPulling="2025-10-03 13:45:16.859535003 +0000 UTC m=+830.857520050" lastFinishedPulling="2025-10-03 13:45:24.521521215 +0000 UTC m=+838.519506262" observedRunningTime="2025-10-03 13:45:29.438311549 +0000 UTC m=+843.436296606" watchObservedRunningTime="2025-10-03 13:45:29.442459038 +0000 UTC m=+843.440444095"
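The two "Observed pod startup duration" entries above are internally consistent: podStartE2EDuration equals watchObservedRunningTime minus podCreationTimestamp, and podStartSLOduration equals that figure minus the image-pull window (lastFinishedPulling minus firstStartedPulling). Reading that as the tracker's formula is an inference from these entries rather than a quote of kubelet source, but it checks out numerically for the frr-k8s-xxhsl pod:

package main

import (
	"fmt"
	"time"
)

func main() {
	// Layout matching the timestamps as printed in the log.
	const layout = "2006-01-02 15:04:05.999999999 -0700 MST"
	parse := func(s string) time.Time {
		t, err := time.Parse(layout, s)
		if err != nil {
			panic(err)
		}
		return t
	}
	created := parse("2025-10-03 13:45:14 +0000 UTC")
	firstPull := parse("2025-10-03 13:45:16.859535003 +0000 UTC")
	lastPull := parse("2025-10-03 13:45:24.521521215 +0000 UTC")
	observed := parse("2025-10-03 13:45:29.442459038 +0000 UTC") // watchObservedRunningTime

	e2e := observed.Sub(created)         // 15.442459038s = podStartE2EDuration
	slo := e2e - lastPull.Sub(firstPull) // 7.780472826s  = podStartSLOduration
	fmt.Println(e2e, slo)
}

The same arithmetic reproduces the certified-operators-4k4l7 figures (5.42667268s end-to-end, 2.97364679s after subtracting the 2.45302589s pull window).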
m=+843.436296606" watchObservedRunningTime="2025-10-03 13:45:29.442459038 +0000 UTC m=+843.440444095" Oct 03 13:45:30 crc kubenswrapper[4861]: I1003 13:45:30.427902 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-xxhsl" Oct 03 13:45:31 crc kubenswrapper[4861]: I1003 13:45:31.754505 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-xxhsl" Oct 03 13:45:31 crc kubenswrapper[4861]: I1003 13:45:31.790792 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-xxhsl" Oct 03 13:45:33 crc kubenswrapper[4861]: I1003 13:45:33.582485 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-4k4l7" Oct 03 13:45:33 crc kubenswrapper[4861]: I1003 13:45:33.582559 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-4k4l7" Oct 03 13:45:33 crc kubenswrapper[4861]: I1003 13:45:33.623929 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-4k4l7" Oct 03 13:45:34 crc kubenswrapper[4861]: I1003 13:45:34.495854 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-4k4l7" Oct 03 13:45:34 crc kubenswrapper[4861]: I1003 13:45:34.532862 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-4k4l7"] Oct 03 13:45:35 crc kubenswrapper[4861]: I1003 13:45:35.922305 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-jn69p" Oct 03 13:45:36 crc kubenswrapper[4861]: I1003 13:45:36.457632 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-4k4l7" podUID="efc1e6ec-91b9-4100-898f-4aeae1de5f44" containerName="registry-server" containerID="cri-o://a9a892817a5c95dcf9a3372d2c697eff58aadfdf66cec247d51becb3d0036d4a" gracePeriod=2 Oct 03 13:45:36 crc kubenswrapper[4861]: I1003 13:45:36.766824 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-4k4l7" Oct 03 13:45:36 crc kubenswrapper[4861]: I1003 13:45:36.786142 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/efc1e6ec-91b9-4100-898f-4aeae1de5f44-catalog-content\") pod \"efc1e6ec-91b9-4100-898f-4aeae1de5f44\" (UID: \"efc1e6ec-91b9-4100-898f-4aeae1de5f44\") " Oct 03 13:45:36 crc kubenswrapper[4861]: I1003 13:45:36.786313 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/efc1e6ec-91b9-4100-898f-4aeae1de5f44-utilities\") pod \"efc1e6ec-91b9-4100-898f-4aeae1de5f44\" (UID: \"efc1e6ec-91b9-4100-898f-4aeae1de5f44\") " Oct 03 13:45:36 crc kubenswrapper[4861]: I1003 13:45:36.786524 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qljcj\" (UniqueName: \"kubernetes.io/projected/efc1e6ec-91b9-4100-898f-4aeae1de5f44-kube-api-access-qljcj\") pod \"efc1e6ec-91b9-4100-898f-4aeae1de5f44\" (UID: \"efc1e6ec-91b9-4100-898f-4aeae1de5f44\") " Oct 03 13:45:36 crc kubenswrapper[4861]: I1003 13:45:36.808173 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/efc1e6ec-91b9-4100-898f-4aeae1de5f44-utilities" (OuterVolumeSpecName: "utilities") pod "efc1e6ec-91b9-4100-898f-4aeae1de5f44" (UID: "efc1e6ec-91b9-4100-898f-4aeae1de5f44"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:45:36 crc kubenswrapper[4861]: I1003 13:45:36.815125 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efc1e6ec-91b9-4100-898f-4aeae1de5f44-kube-api-access-qljcj" (OuterVolumeSpecName: "kube-api-access-qljcj") pod "efc1e6ec-91b9-4100-898f-4aeae1de5f44" (UID: "efc1e6ec-91b9-4100-898f-4aeae1de5f44"). InnerVolumeSpecName "kube-api-access-qljcj". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:45:36 crc kubenswrapper[4861]: I1003 13:45:36.851375 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/efc1e6ec-91b9-4100-898f-4aeae1de5f44-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "efc1e6ec-91b9-4100-898f-4aeae1de5f44" (UID: "efc1e6ec-91b9-4100-898f-4aeae1de5f44"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:45:36 crc kubenswrapper[4861]: I1003 13:45:36.888159 4861 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/efc1e6ec-91b9-4100-898f-4aeae1de5f44-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 13:45:36 crc kubenswrapper[4861]: I1003 13:45:36.888530 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qljcj\" (UniqueName: \"kubernetes.io/projected/efc1e6ec-91b9-4100-898f-4aeae1de5f44-kube-api-access-qljcj\") on node \"crc\" DevicePath \"\"" Oct 03 13:45:36 crc kubenswrapper[4861]: I1003 13:45:36.888604 4861 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/efc1e6ec-91b9-4100-898f-4aeae1de5f44-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 13:45:36 crc kubenswrapper[4861]: I1003 13:45:36.929412 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-vdx27" Oct 03 13:45:37 crc kubenswrapper[4861]: I1003 13:45:37.465156 4861 generic.go:334] "Generic (PLEG): container finished" podID="efc1e6ec-91b9-4100-898f-4aeae1de5f44" containerID="a9a892817a5c95dcf9a3372d2c697eff58aadfdf66cec247d51becb3d0036d4a" exitCode=0 Oct 03 13:45:37 crc kubenswrapper[4861]: I1003 13:45:37.465316 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4k4l7" Oct 03 13:45:37 crc kubenswrapper[4861]: I1003 13:45:37.465346 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4k4l7" event={"ID":"efc1e6ec-91b9-4100-898f-4aeae1de5f44","Type":"ContainerDied","Data":"a9a892817a5c95dcf9a3372d2c697eff58aadfdf66cec247d51becb3d0036d4a"} Oct 03 13:45:37 crc kubenswrapper[4861]: I1003 13:45:37.465650 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4k4l7" event={"ID":"efc1e6ec-91b9-4100-898f-4aeae1de5f44","Type":"ContainerDied","Data":"4a7b80e50c034a22fba4823da752a12bbe9f1a79b2aa898e425b55be8652fadc"} Oct 03 13:45:37 crc kubenswrapper[4861]: I1003 13:45:37.465762 4861 scope.go:117] "RemoveContainer" containerID="a9a892817a5c95dcf9a3372d2c697eff58aadfdf66cec247d51becb3d0036d4a" Oct 03 13:45:37 crc kubenswrapper[4861]: I1003 13:45:37.486627 4861 scope.go:117] "RemoveContainer" containerID="ec4c597e9d10c510007a51dd9662a90301e52eaa6930ce888e7b46cbd9001a0b" Oct 03 13:45:37 crc kubenswrapper[4861]: I1003 13:45:37.501954 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-4k4l7"] Oct 03 13:45:37 crc kubenswrapper[4861]: I1003 13:45:37.505291 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-4k4l7"] Oct 03 13:45:37 crc kubenswrapper[4861]: I1003 13:45:37.507789 4861 scope.go:117] "RemoveContainer" containerID="07d7caf4219d28d4255ea1b9fe67c1cb9755f4e2ffc0de05d8be6ed8aed3c932" Oct 03 13:45:37 crc kubenswrapper[4861]: I1003 13:45:37.528531 4861 scope.go:117] "RemoveContainer" containerID="a9a892817a5c95dcf9a3372d2c697eff58aadfdf66cec247d51becb3d0036d4a" Oct 03 13:45:37 crc kubenswrapper[4861]: E1003 13:45:37.529756 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a9a892817a5c95dcf9a3372d2c697eff58aadfdf66cec247d51becb3d0036d4a\": container with ID starting with a9a892817a5c95dcf9a3372d2c697eff58aadfdf66cec247d51becb3d0036d4a 
not found: ID does not exist" containerID="a9a892817a5c95dcf9a3372d2c697eff58aadfdf66cec247d51becb3d0036d4a" Oct 03 13:45:37 crc kubenswrapper[4861]: I1003 13:45:37.529804 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a9a892817a5c95dcf9a3372d2c697eff58aadfdf66cec247d51becb3d0036d4a"} err="failed to get container status \"a9a892817a5c95dcf9a3372d2c697eff58aadfdf66cec247d51becb3d0036d4a\": rpc error: code = NotFound desc = could not find container \"a9a892817a5c95dcf9a3372d2c697eff58aadfdf66cec247d51becb3d0036d4a\": container with ID starting with a9a892817a5c95dcf9a3372d2c697eff58aadfdf66cec247d51becb3d0036d4a not found: ID does not exist" Oct 03 13:45:37 crc kubenswrapper[4861]: I1003 13:45:37.529834 4861 scope.go:117] "RemoveContainer" containerID="ec4c597e9d10c510007a51dd9662a90301e52eaa6930ce888e7b46cbd9001a0b" Oct 03 13:45:37 crc kubenswrapper[4861]: E1003 13:45:37.530203 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ec4c597e9d10c510007a51dd9662a90301e52eaa6930ce888e7b46cbd9001a0b\": container with ID starting with ec4c597e9d10c510007a51dd9662a90301e52eaa6930ce888e7b46cbd9001a0b not found: ID does not exist" containerID="ec4c597e9d10c510007a51dd9662a90301e52eaa6930ce888e7b46cbd9001a0b" Oct 03 13:45:37 crc kubenswrapper[4861]: I1003 13:45:37.530310 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ec4c597e9d10c510007a51dd9662a90301e52eaa6930ce888e7b46cbd9001a0b"} err="failed to get container status \"ec4c597e9d10c510007a51dd9662a90301e52eaa6930ce888e7b46cbd9001a0b\": rpc error: code = NotFound desc = could not find container \"ec4c597e9d10c510007a51dd9662a90301e52eaa6930ce888e7b46cbd9001a0b\": container with ID starting with ec4c597e9d10c510007a51dd9662a90301e52eaa6930ce888e7b46cbd9001a0b not found: ID does not exist" Oct 03 13:45:37 crc kubenswrapper[4861]: I1003 13:45:37.530396 4861 scope.go:117] "RemoveContainer" containerID="07d7caf4219d28d4255ea1b9fe67c1cb9755f4e2ffc0de05d8be6ed8aed3c932" Oct 03 13:45:37 crc kubenswrapper[4861]: E1003 13:45:37.530753 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"07d7caf4219d28d4255ea1b9fe67c1cb9755f4e2ffc0de05d8be6ed8aed3c932\": container with ID starting with 07d7caf4219d28d4255ea1b9fe67c1cb9755f4e2ffc0de05d8be6ed8aed3c932 not found: ID does not exist" containerID="07d7caf4219d28d4255ea1b9fe67c1cb9755f4e2ffc0de05d8be6ed8aed3c932" Oct 03 13:45:37 crc kubenswrapper[4861]: I1003 13:45:37.530850 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"07d7caf4219d28d4255ea1b9fe67c1cb9755f4e2ffc0de05d8be6ed8aed3c932"} err="failed to get container status \"07d7caf4219d28d4255ea1b9fe67c1cb9755f4e2ffc0de05d8be6ed8aed3c932\": rpc error: code = NotFound desc = could not find container \"07d7caf4219d28d4255ea1b9fe67c1cb9755f4e2ffc0de05d8be6ed8aed3c932\": container with ID starting with 07d7caf4219d28d4255ea1b9fe67c1cb9755f4e2ffc0de05d8be6ed8aed3c932 not found: ID does not exist" Oct 03 13:45:38 crc kubenswrapper[4861]: I1003 13:45:38.689412 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efc1e6ec-91b9-4100-898f-4aeae1de5f44" path="/var/lib/kubelet/pods/efc1e6ec-91b9-4100-898f-4aeae1de5f44/volumes" Oct 03 13:45:39 crc kubenswrapper[4861]: I1003 13:45:39.737709 4861 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack-operators/openstack-operator-index-qbwmp"] Oct 03 13:45:39 crc kubenswrapper[4861]: E1003 13:45:39.737995 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="efc1e6ec-91b9-4100-898f-4aeae1de5f44" containerName="registry-server" Oct 03 13:45:39 crc kubenswrapper[4861]: I1003 13:45:39.738010 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="efc1e6ec-91b9-4100-898f-4aeae1de5f44" containerName="registry-server" Oct 03 13:45:39 crc kubenswrapper[4861]: E1003 13:45:39.738026 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0c547d33-c876-4c98-b1ce-a61325cedd45" containerName="extract-utilities" Oct 03 13:45:39 crc kubenswrapper[4861]: I1003 13:45:39.738034 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="0c547d33-c876-4c98-b1ce-a61325cedd45" containerName="extract-utilities" Oct 03 13:45:39 crc kubenswrapper[4861]: E1003 13:45:39.738046 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d830b12-c365-42ab-a9dc-583b4b3fabe9" containerName="registry-server" Oct 03 13:45:39 crc kubenswrapper[4861]: I1003 13:45:39.738054 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="1d830b12-c365-42ab-a9dc-583b4b3fabe9" containerName="registry-server" Oct 03 13:45:39 crc kubenswrapper[4861]: E1003 13:45:39.738065 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="efc1e6ec-91b9-4100-898f-4aeae1de5f44" containerName="extract-utilities" Oct 03 13:45:39 crc kubenswrapper[4861]: I1003 13:45:39.738073 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="efc1e6ec-91b9-4100-898f-4aeae1de5f44" containerName="extract-utilities" Oct 03 13:45:39 crc kubenswrapper[4861]: E1003 13:45:39.738090 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d830b12-c365-42ab-a9dc-583b4b3fabe9" containerName="extract-content" Oct 03 13:45:39 crc kubenswrapper[4861]: I1003 13:45:39.738097 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="1d830b12-c365-42ab-a9dc-583b4b3fabe9" containerName="extract-content" Oct 03 13:45:39 crc kubenswrapper[4861]: E1003 13:45:39.738109 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0c547d33-c876-4c98-b1ce-a61325cedd45" containerName="extract-content" Oct 03 13:45:39 crc kubenswrapper[4861]: I1003 13:45:39.738118 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="0c547d33-c876-4c98-b1ce-a61325cedd45" containerName="extract-content" Oct 03 13:45:39 crc kubenswrapper[4861]: E1003 13:45:39.738129 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d830b12-c365-42ab-a9dc-583b4b3fabe9" containerName="extract-utilities" Oct 03 13:45:39 crc kubenswrapper[4861]: I1003 13:45:39.738136 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="1d830b12-c365-42ab-a9dc-583b4b3fabe9" containerName="extract-utilities" Oct 03 13:45:39 crc kubenswrapper[4861]: E1003 13:45:39.738145 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0c547d33-c876-4c98-b1ce-a61325cedd45" containerName="registry-server" Oct 03 13:45:39 crc kubenswrapper[4861]: I1003 13:45:39.738152 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="0c547d33-c876-4c98-b1ce-a61325cedd45" containerName="registry-server" Oct 03 13:45:39 crc kubenswrapper[4861]: E1003 13:45:39.738162 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="efc1e6ec-91b9-4100-898f-4aeae1de5f44" containerName="extract-content" Oct 03 13:45:39 crc kubenswrapper[4861]: I1003 13:45:39.738170 4861 state_mem.go:107] "Deleted CPUSet 
assignment" podUID="efc1e6ec-91b9-4100-898f-4aeae1de5f44" containerName="extract-content" Oct 03 13:45:39 crc kubenswrapper[4861]: I1003 13:45:39.738324 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="efc1e6ec-91b9-4100-898f-4aeae1de5f44" containerName="registry-server" Oct 03 13:45:39 crc kubenswrapper[4861]: I1003 13:45:39.738342 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="0c547d33-c876-4c98-b1ce-a61325cedd45" containerName="registry-server" Oct 03 13:45:39 crc kubenswrapper[4861]: I1003 13:45:39.738357 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="1d830b12-c365-42ab-a9dc-583b4b3fabe9" containerName="registry-server" Oct 03 13:45:39 crc kubenswrapper[4861]: I1003 13:45:39.738823 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-qbwmp" Oct 03 13:45:39 crc kubenswrapper[4861]: I1003 13:45:39.740540 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-qh5kn" Oct 03 13:45:39 crc kubenswrapper[4861]: I1003 13:45:39.743606 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Oct 03 13:45:39 crc kubenswrapper[4861]: I1003 13:45:39.745146 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Oct 03 13:45:39 crc kubenswrapper[4861]: I1003 13:45:39.804724 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-qbwmp"] Oct 03 13:45:39 crc kubenswrapper[4861]: I1003 13:45:39.825544 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kxrk9\" (UniqueName: \"kubernetes.io/projected/a80a260c-3304-42fa-b9c5-b17a621f7239-kube-api-access-kxrk9\") pod \"openstack-operator-index-qbwmp\" (UID: \"a80a260c-3304-42fa-b9c5-b17a621f7239\") " pod="openstack-operators/openstack-operator-index-qbwmp" Oct 03 13:45:39 crc kubenswrapper[4861]: I1003 13:45:39.926395 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kxrk9\" (UniqueName: \"kubernetes.io/projected/a80a260c-3304-42fa-b9c5-b17a621f7239-kube-api-access-kxrk9\") pod \"openstack-operator-index-qbwmp\" (UID: \"a80a260c-3304-42fa-b9c5-b17a621f7239\") " pod="openstack-operators/openstack-operator-index-qbwmp" Oct 03 13:45:39 crc kubenswrapper[4861]: I1003 13:45:39.950278 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kxrk9\" (UniqueName: \"kubernetes.io/projected/a80a260c-3304-42fa-b9c5-b17a621f7239-kube-api-access-kxrk9\") pod \"openstack-operator-index-qbwmp\" (UID: \"a80a260c-3304-42fa-b9c5-b17a621f7239\") " pod="openstack-operators/openstack-operator-index-qbwmp" Oct 03 13:45:40 crc kubenswrapper[4861]: I1003 13:45:40.054484 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-qbwmp" Oct 03 13:45:40 crc kubenswrapper[4861]: I1003 13:45:40.480912 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-qbwmp"] Oct 03 13:45:40 crc kubenswrapper[4861]: W1003 13:45:40.484146 4861 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda80a260c_3304_42fa_b9c5_b17a621f7239.slice/crio-63226139be54146d3b5240c51bc5c91bad41842ba8e347776d4b5816e96c34dc WatchSource:0}: Error finding container 63226139be54146d3b5240c51bc5c91bad41842ba8e347776d4b5816e96c34dc: Status 404 returned error can't find the container with id 63226139be54146d3b5240c51bc5c91bad41842ba8e347776d4b5816e96c34dc Oct 03 13:45:41 crc kubenswrapper[4861]: I1003 13:45:41.493330 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-qbwmp" event={"ID":"a80a260c-3304-42fa-b9c5-b17a621f7239","Type":"ContainerStarted","Data":"63226139be54146d3b5240c51bc5c91bad41842ba8e347776d4b5816e96c34dc"} Oct 03 13:45:43 crc kubenswrapper[4861]: I1003 13:45:43.505744 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-qbwmp" event={"ID":"a80a260c-3304-42fa-b9c5-b17a621f7239","Type":"ContainerStarted","Data":"d0875d3a45936e22e22c25ae9069df1b4be5d305549df9daf198f81414c8710a"} Oct 03 13:45:43 crc kubenswrapper[4861]: I1003 13:45:43.524425 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-qbwmp" podStartSLOduration=2.098932717 podStartE2EDuration="4.524403975s" podCreationTimestamp="2025-10-03 13:45:39 +0000 UTC" firstStartedPulling="2025-10-03 13:45:40.486638235 +0000 UTC m=+854.484623282" lastFinishedPulling="2025-10-03 13:45:42.912109493 +0000 UTC m=+856.910094540" observedRunningTime="2025-10-03 13:45:43.523631346 +0000 UTC m=+857.521616403" watchObservedRunningTime="2025-10-03 13:45:43.524403975 +0000 UTC m=+857.522389022" Oct 03 13:45:44 crc kubenswrapper[4861]: I1003 13:45:44.053943 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-qbwmp"] Oct 03 13:45:44 crc kubenswrapper[4861]: I1003 13:45:44.865214 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-tkc8v"] Oct 03 13:45:44 crc kubenswrapper[4861]: I1003 13:45:44.872203 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-tkc8v" Oct 03 13:45:44 crc kubenswrapper[4861]: I1003 13:45:44.873665 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-tkc8v"] Oct 03 13:45:44 crc kubenswrapper[4861]: I1003 13:45:44.891291 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vnfng\" (UniqueName: \"kubernetes.io/projected/48467041-0fb2-4032-a831-11500776f212-kube-api-access-vnfng\") pod \"openstack-operator-index-tkc8v\" (UID: \"48467041-0fb2-4032-a831-11500776f212\") " pod="openstack-operators/openstack-operator-index-tkc8v" Oct 03 13:45:44 crc kubenswrapper[4861]: I1003 13:45:44.992502 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vnfng\" (UniqueName: \"kubernetes.io/projected/48467041-0fb2-4032-a831-11500776f212-kube-api-access-vnfng\") pod \"openstack-operator-index-tkc8v\" (UID: \"48467041-0fb2-4032-a831-11500776f212\") " pod="openstack-operators/openstack-operator-index-tkc8v" Oct 03 13:45:45 crc kubenswrapper[4861]: I1003 13:45:45.016433 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vnfng\" (UniqueName: \"kubernetes.io/projected/48467041-0fb2-4032-a831-11500776f212-kube-api-access-vnfng\") pod \"openstack-operator-index-tkc8v\" (UID: \"48467041-0fb2-4032-a831-11500776f212\") " pod="openstack-operators/openstack-operator-index-tkc8v" Oct 03 13:45:45 crc kubenswrapper[4861]: I1003 13:45:45.194831 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-tkc8v" Oct 03 13:45:45 crc kubenswrapper[4861]: I1003 13:45:45.515707 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/openstack-operator-index-qbwmp" podUID="a80a260c-3304-42fa-b9c5-b17a621f7239" containerName="registry-server" containerID="cri-o://d0875d3a45936e22e22c25ae9069df1b4be5d305549df9daf198f81414c8710a" gracePeriod=2 Oct 03 13:45:45 crc kubenswrapper[4861]: I1003 13:45:45.589645 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-tkc8v"] Oct 03 13:45:45 crc kubenswrapper[4861]: W1003 13:45:45.593082 4861 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod48467041_0fb2_4032_a831_11500776f212.slice/crio-e803810dc8a4493713d18c3d73f0f7b537b97241f7c24c42b6798501f9f9f3bd WatchSource:0}: Error finding container e803810dc8a4493713d18c3d73f0f7b537b97241f7c24c42b6798501f9f9f3bd: Status 404 returned error can't find the container with id e803810dc8a4493713d18c3d73f0f7b537b97241f7c24c42b6798501f9f9f3bd Oct 03 13:45:45 crc kubenswrapper[4861]: I1003 13:45:45.857906 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-qbwmp" Oct 03 13:45:46 crc kubenswrapper[4861]: I1003 13:45:46.005422 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kxrk9\" (UniqueName: \"kubernetes.io/projected/a80a260c-3304-42fa-b9c5-b17a621f7239-kube-api-access-kxrk9\") pod \"a80a260c-3304-42fa-b9c5-b17a621f7239\" (UID: \"a80a260c-3304-42fa-b9c5-b17a621f7239\") " Oct 03 13:45:46 crc kubenswrapper[4861]: I1003 13:45:46.011354 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a80a260c-3304-42fa-b9c5-b17a621f7239-kube-api-access-kxrk9" (OuterVolumeSpecName: "kube-api-access-kxrk9") pod "a80a260c-3304-42fa-b9c5-b17a621f7239" (UID: "a80a260c-3304-42fa-b9c5-b17a621f7239"). InnerVolumeSpecName "kube-api-access-kxrk9". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:45:46 crc kubenswrapper[4861]: I1003 13:45:46.107348 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kxrk9\" (UniqueName: \"kubernetes.io/projected/a80a260c-3304-42fa-b9c5-b17a621f7239-kube-api-access-kxrk9\") on node \"crc\" DevicePath \"\"" Oct 03 13:45:46 crc kubenswrapper[4861]: I1003 13:45:46.522610 4861 generic.go:334] "Generic (PLEG): container finished" podID="a80a260c-3304-42fa-b9c5-b17a621f7239" containerID="d0875d3a45936e22e22c25ae9069df1b4be5d305549df9daf198f81414c8710a" exitCode=0 Oct 03 13:45:46 crc kubenswrapper[4861]: I1003 13:45:46.522680 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-qbwmp" event={"ID":"a80a260c-3304-42fa-b9c5-b17a621f7239","Type":"ContainerDied","Data":"d0875d3a45936e22e22c25ae9069df1b4be5d305549df9daf198f81414c8710a"} Oct 03 13:45:46 crc kubenswrapper[4861]: I1003 13:45:46.522686 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-qbwmp" Oct 03 13:45:46 crc kubenswrapper[4861]: I1003 13:45:46.522707 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-qbwmp" event={"ID":"a80a260c-3304-42fa-b9c5-b17a621f7239","Type":"ContainerDied","Data":"63226139be54146d3b5240c51bc5c91bad41842ba8e347776d4b5816e96c34dc"} Oct 03 13:45:46 crc kubenswrapper[4861]: I1003 13:45:46.522726 4861 scope.go:117] "RemoveContainer" containerID="d0875d3a45936e22e22c25ae9069df1b4be5d305549df9daf198f81414c8710a" Oct 03 13:45:46 crc kubenswrapper[4861]: I1003 13:45:46.525071 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-tkc8v" event={"ID":"48467041-0fb2-4032-a831-11500776f212","Type":"ContainerStarted","Data":"7e1e50b60bf05a7a10d3cfeadc2db13a567838cf1f645f6f55f01f6d99a94c08"} Oct 03 13:45:46 crc kubenswrapper[4861]: I1003 13:45:46.525122 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-tkc8v" event={"ID":"48467041-0fb2-4032-a831-11500776f212","Type":"ContainerStarted","Data":"e803810dc8a4493713d18c3d73f0f7b537b97241f7c24c42b6798501f9f9f3bd"} Oct 03 13:45:46 crc kubenswrapper[4861]: I1003 13:45:46.544029 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-tkc8v" podStartSLOduration=2.495857727 podStartE2EDuration="2.544013041s" podCreationTimestamp="2025-10-03 13:45:44 +0000 UTC" firstStartedPulling="2025-10-03 13:45:45.596941135 +0000 UTC m=+859.594926182" lastFinishedPulling="2025-10-03 13:45:45.645096439 +0000 UTC m=+859.643081496" observedRunningTime="2025-10-03 13:45:46.540651492 +0000 UTC m=+860.538636539" watchObservedRunningTime="2025-10-03 13:45:46.544013041 +0000 UTC m=+860.541998088" Oct 03 13:45:46 crc kubenswrapper[4861]: I1003 13:45:46.547161 4861 scope.go:117] "RemoveContainer" containerID="d0875d3a45936e22e22c25ae9069df1b4be5d305549df9daf198f81414c8710a" Oct 03 13:45:46 crc kubenswrapper[4861]: E1003 13:45:46.547606 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d0875d3a45936e22e22c25ae9069df1b4be5d305549df9daf198f81414c8710a\": container with ID starting with d0875d3a45936e22e22c25ae9069df1b4be5d305549df9daf198f81414c8710a not found: ID does not exist" containerID="d0875d3a45936e22e22c25ae9069df1b4be5d305549df9daf198f81414c8710a" Oct 03 13:45:46 crc kubenswrapper[4861]: I1003 13:45:46.547675 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d0875d3a45936e22e22c25ae9069df1b4be5d305549df9daf198f81414c8710a"} err="failed to get container status \"d0875d3a45936e22e22c25ae9069df1b4be5d305549df9daf198f81414c8710a\": rpc error: code = NotFound desc = could not find container \"d0875d3a45936e22e22c25ae9069df1b4be5d305549df9daf198f81414c8710a\": container with ID starting with d0875d3a45936e22e22c25ae9069df1b4be5d305549df9daf198f81414c8710a not found: ID does not exist" Oct 03 13:45:46 crc kubenswrapper[4861]: I1003 13:45:46.562962 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-qbwmp"] Oct 03 13:45:46 crc kubenswrapper[4861]: I1003 13:45:46.567627 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/openstack-operator-index-qbwmp"] Oct 03 13:45:46 crc kubenswrapper[4861]: I1003 13:45:46.689835 4861 kubelet_volumes.go:163] 
"Cleaned up orphaned pod volumes dir" podUID="a80a260c-3304-42fa-b9c5-b17a621f7239" path="/var/lib/kubelet/pods/a80a260c-3304-42fa-b9c5-b17a621f7239/volumes" Oct 03 13:45:46 crc kubenswrapper[4861]: I1003 13:45:46.756862 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-xxhsl" Oct 03 13:45:55 crc kubenswrapper[4861]: I1003 13:45:55.195681 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/openstack-operator-index-tkc8v" Oct 03 13:45:55 crc kubenswrapper[4861]: I1003 13:45:55.196254 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-index-tkc8v" Oct 03 13:45:55 crc kubenswrapper[4861]: I1003 13:45:55.222009 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/openstack-operator-index-tkc8v" Oct 03 13:45:55 crc kubenswrapper[4861]: I1003 13:45:55.603861 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-index-tkc8v" Oct 03 13:46:00 crc kubenswrapper[4861]: I1003 13:46:00.144886 4861 patch_prober.go:28] interesting pod/machine-config-daemon-t9slw container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 13:46:00 crc kubenswrapper[4861]: I1003 13:46:00.147003 4861 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 13:46:02 crc kubenswrapper[4861]: I1003 13:46:02.098158 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/4bc19491273fb05f10b8b1261ee07db0d4b5cae179a5dad352c99ff354t6br5"] Oct 03 13:46:02 crc kubenswrapper[4861]: E1003 13:46:02.098566 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a80a260c-3304-42fa-b9c5-b17a621f7239" containerName="registry-server" Oct 03 13:46:02 crc kubenswrapper[4861]: I1003 13:46:02.098589 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="a80a260c-3304-42fa-b9c5-b17a621f7239" containerName="registry-server" Oct 03 13:46:02 crc kubenswrapper[4861]: I1003 13:46:02.098796 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="a80a260c-3304-42fa-b9c5-b17a621f7239" containerName="registry-server" Oct 03 13:46:02 crc kubenswrapper[4861]: I1003 13:46:02.100387 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/4bc19491273fb05f10b8b1261ee07db0d4b5cae179a5dad352c99ff354t6br5" Oct 03 13:46:02 crc kubenswrapper[4861]: I1003 13:46:02.102470 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-hllst" Oct 03 13:46:02 crc kubenswrapper[4861]: I1003 13:46:02.148612 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/4bc19491273fb05f10b8b1261ee07db0d4b5cae179a5dad352c99ff354t6br5"] Oct 03 13:46:02 crc kubenswrapper[4861]: I1003 13:46:02.218401 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/439d7722-c1ce-4a61-b781-499e9278b8d5-bundle\") pod \"4bc19491273fb05f10b8b1261ee07db0d4b5cae179a5dad352c99ff354t6br5\" (UID: \"439d7722-c1ce-4a61-b781-499e9278b8d5\") " pod="openstack-operators/4bc19491273fb05f10b8b1261ee07db0d4b5cae179a5dad352c99ff354t6br5" Oct 03 13:46:02 crc kubenswrapper[4861]: I1003 13:46:02.218448 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/439d7722-c1ce-4a61-b781-499e9278b8d5-util\") pod \"4bc19491273fb05f10b8b1261ee07db0d4b5cae179a5dad352c99ff354t6br5\" (UID: \"439d7722-c1ce-4a61-b781-499e9278b8d5\") " pod="openstack-operators/4bc19491273fb05f10b8b1261ee07db0d4b5cae179a5dad352c99ff354t6br5" Oct 03 13:46:02 crc kubenswrapper[4861]: I1003 13:46:02.218514 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w8x6x\" (UniqueName: \"kubernetes.io/projected/439d7722-c1ce-4a61-b781-499e9278b8d5-kube-api-access-w8x6x\") pod \"4bc19491273fb05f10b8b1261ee07db0d4b5cae179a5dad352c99ff354t6br5\" (UID: \"439d7722-c1ce-4a61-b781-499e9278b8d5\") " pod="openstack-operators/4bc19491273fb05f10b8b1261ee07db0d4b5cae179a5dad352c99ff354t6br5" Oct 03 13:46:02 crc kubenswrapper[4861]: I1003 13:46:02.320273 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w8x6x\" (UniqueName: \"kubernetes.io/projected/439d7722-c1ce-4a61-b781-499e9278b8d5-kube-api-access-w8x6x\") pod \"4bc19491273fb05f10b8b1261ee07db0d4b5cae179a5dad352c99ff354t6br5\" (UID: \"439d7722-c1ce-4a61-b781-499e9278b8d5\") " pod="openstack-operators/4bc19491273fb05f10b8b1261ee07db0d4b5cae179a5dad352c99ff354t6br5" Oct 03 13:46:02 crc kubenswrapper[4861]: I1003 13:46:02.320380 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/439d7722-c1ce-4a61-b781-499e9278b8d5-bundle\") pod \"4bc19491273fb05f10b8b1261ee07db0d4b5cae179a5dad352c99ff354t6br5\" (UID: \"439d7722-c1ce-4a61-b781-499e9278b8d5\") " pod="openstack-operators/4bc19491273fb05f10b8b1261ee07db0d4b5cae179a5dad352c99ff354t6br5" Oct 03 13:46:02 crc kubenswrapper[4861]: I1003 13:46:02.320440 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/439d7722-c1ce-4a61-b781-499e9278b8d5-util\") pod \"4bc19491273fb05f10b8b1261ee07db0d4b5cae179a5dad352c99ff354t6br5\" (UID: \"439d7722-c1ce-4a61-b781-499e9278b8d5\") " pod="openstack-operators/4bc19491273fb05f10b8b1261ee07db0d4b5cae179a5dad352c99ff354t6br5" Oct 03 13:46:02 crc kubenswrapper[4861]: I1003 13:46:02.320917 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/439d7722-c1ce-4a61-b781-499e9278b8d5-bundle\") pod \"4bc19491273fb05f10b8b1261ee07db0d4b5cae179a5dad352c99ff354t6br5\" (UID: \"439d7722-c1ce-4a61-b781-499e9278b8d5\") " pod="openstack-operators/4bc19491273fb05f10b8b1261ee07db0d4b5cae179a5dad352c99ff354t6br5" Oct 03 13:46:02 crc kubenswrapper[4861]: I1003 13:46:02.320978 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/439d7722-c1ce-4a61-b781-499e9278b8d5-util\") pod \"4bc19491273fb05f10b8b1261ee07db0d4b5cae179a5dad352c99ff354t6br5\" (UID: \"439d7722-c1ce-4a61-b781-499e9278b8d5\") " pod="openstack-operators/4bc19491273fb05f10b8b1261ee07db0d4b5cae179a5dad352c99ff354t6br5" Oct 03 13:46:02 crc kubenswrapper[4861]: I1003 13:46:02.338098 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w8x6x\" (UniqueName: \"kubernetes.io/projected/439d7722-c1ce-4a61-b781-499e9278b8d5-kube-api-access-w8x6x\") pod \"4bc19491273fb05f10b8b1261ee07db0d4b5cae179a5dad352c99ff354t6br5\" (UID: \"439d7722-c1ce-4a61-b781-499e9278b8d5\") " pod="openstack-operators/4bc19491273fb05f10b8b1261ee07db0d4b5cae179a5dad352c99ff354t6br5" Oct 03 13:46:02 crc kubenswrapper[4861]: I1003 13:46:02.428379 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/4bc19491273fb05f10b8b1261ee07db0d4b5cae179a5dad352c99ff354t6br5" Oct 03 13:46:02 crc kubenswrapper[4861]: I1003 13:46:02.824790 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/4bc19491273fb05f10b8b1261ee07db0d4b5cae179a5dad352c99ff354t6br5"] Oct 03 13:46:03 crc kubenswrapper[4861]: I1003 13:46:03.620075 4861 generic.go:334] "Generic (PLEG): container finished" podID="439d7722-c1ce-4a61-b781-499e9278b8d5" containerID="e7231f85333bed8e90a04813750b3a597945c8e6eb8cbd57104c1f0dcd57165f" exitCode=0 Oct 03 13:46:03 crc kubenswrapper[4861]: I1003 13:46:03.620122 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/4bc19491273fb05f10b8b1261ee07db0d4b5cae179a5dad352c99ff354t6br5" event={"ID":"439d7722-c1ce-4a61-b781-499e9278b8d5","Type":"ContainerDied","Data":"e7231f85333bed8e90a04813750b3a597945c8e6eb8cbd57104c1f0dcd57165f"} Oct 03 13:46:03 crc kubenswrapper[4861]: I1003 13:46:03.620153 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/4bc19491273fb05f10b8b1261ee07db0d4b5cae179a5dad352c99ff354t6br5" event={"ID":"439d7722-c1ce-4a61-b781-499e9278b8d5","Type":"ContainerStarted","Data":"cb62e8ac9b3cdadc246a4f2c53c22bca6bdb87903060b6a4cdcf17a98cd27816"} Oct 03 13:46:04 crc kubenswrapper[4861]: I1003 13:46:04.627297 4861 generic.go:334] "Generic (PLEG): container finished" podID="439d7722-c1ce-4a61-b781-499e9278b8d5" containerID="1760f76f379ad3050309427e19b1e08b9649619208103c49fd85b5ca6994473b" exitCode=0 Oct 03 13:46:04 crc kubenswrapper[4861]: I1003 13:46:04.628453 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/4bc19491273fb05f10b8b1261ee07db0d4b5cae179a5dad352c99ff354t6br5" event={"ID":"439d7722-c1ce-4a61-b781-499e9278b8d5","Type":"ContainerDied","Data":"1760f76f379ad3050309427e19b1e08b9649619208103c49fd85b5ca6994473b"} Oct 03 13:46:05 crc kubenswrapper[4861]: I1003 13:46:05.637747 4861 generic.go:334] "Generic (PLEG): container finished" podID="439d7722-c1ce-4a61-b781-499e9278b8d5" containerID="e1c7f8e5d9f0ee0cff2178e53f9e0d6b97a2e30f0eb8de9fb9f3ecefea8a152d" exitCode=0 Oct 03 13:46:05 crc kubenswrapper[4861]: I1003 13:46:05.637864 4861 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/4bc19491273fb05f10b8b1261ee07db0d4b5cae179a5dad352c99ff354t6br5" event={"ID":"439d7722-c1ce-4a61-b781-499e9278b8d5","Type":"ContainerDied","Data":"e1c7f8e5d9f0ee0cff2178e53f9e0d6b97a2e30f0eb8de9fb9f3ecefea8a152d"} Oct 03 13:46:06 crc kubenswrapper[4861]: I1003 13:46:06.888298 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/4bc19491273fb05f10b8b1261ee07db0d4b5cae179a5dad352c99ff354t6br5" Oct 03 13:46:06 crc kubenswrapper[4861]: I1003 13:46:06.985133 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w8x6x\" (UniqueName: \"kubernetes.io/projected/439d7722-c1ce-4a61-b781-499e9278b8d5-kube-api-access-w8x6x\") pod \"439d7722-c1ce-4a61-b781-499e9278b8d5\" (UID: \"439d7722-c1ce-4a61-b781-499e9278b8d5\") " Oct 03 13:46:06 crc kubenswrapper[4861]: I1003 13:46:06.985294 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/439d7722-c1ce-4a61-b781-499e9278b8d5-bundle\") pod \"439d7722-c1ce-4a61-b781-499e9278b8d5\" (UID: \"439d7722-c1ce-4a61-b781-499e9278b8d5\") " Oct 03 13:46:06 crc kubenswrapper[4861]: I1003 13:46:06.985327 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/439d7722-c1ce-4a61-b781-499e9278b8d5-util\") pod \"439d7722-c1ce-4a61-b781-499e9278b8d5\" (UID: \"439d7722-c1ce-4a61-b781-499e9278b8d5\") " Oct 03 13:46:06 crc kubenswrapper[4861]: I1003 13:46:06.985944 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/439d7722-c1ce-4a61-b781-499e9278b8d5-bundle" (OuterVolumeSpecName: "bundle") pod "439d7722-c1ce-4a61-b781-499e9278b8d5" (UID: "439d7722-c1ce-4a61-b781-499e9278b8d5"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:46:06 crc kubenswrapper[4861]: I1003 13:46:06.992511 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/439d7722-c1ce-4a61-b781-499e9278b8d5-kube-api-access-w8x6x" (OuterVolumeSpecName: "kube-api-access-w8x6x") pod "439d7722-c1ce-4a61-b781-499e9278b8d5" (UID: "439d7722-c1ce-4a61-b781-499e9278b8d5"). InnerVolumeSpecName "kube-api-access-w8x6x". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:46:06 crc kubenswrapper[4861]: I1003 13:46:06.998401 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/439d7722-c1ce-4a61-b781-499e9278b8d5-util" (OuterVolumeSpecName: "util") pod "439d7722-c1ce-4a61-b781-499e9278b8d5" (UID: "439d7722-c1ce-4a61-b781-499e9278b8d5"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:46:07 crc kubenswrapper[4861]: I1003 13:46:07.086541 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w8x6x\" (UniqueName: \"kubernetes.io/projected/439d7722-c1ce-4a61-b781-499e9278b8d5-kube-api-access-w8x6x\") on node \"crc\" DevicePath \"\"" Oct 03 13:46:07 crc kubenswrapper[4861]: I1003 13:46:07.086568 4861 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/439d7722-c1ce-4a61-b781-499e9278b8d5-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 13:46:07 crc kubenswrapper[4861]: I1003 13:46:07.086578 4861 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/439d7722-c1ce-4a61-b781-499e9278b8d5-util\") on node \"crc\" DevicePath \"\"" Oct 03 13:46:07 crc kubenswrapper[4861]: I1003 13:46:07.651944 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/4bc19491273fb05f10b8b1261ee07db0d4b5cae179a5dad352c99ff354t6br5" event={"ID":"439d7722-c1ce-4a61-b781-499e9278b8d5","Type":"ContainerDied","Data":"cb62e8ac9b3cdadc246a4f2c53c22bca6bdb87903060b6a4cdcf17a98cd27816"} Oct 03 13:46:07 crc kubenswrapper[4861]: I1003 13:46:07.652193 4861 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cb62e8ac9b3cdadc246a4f2c53c22bca6bdb87903060b6a4cdcf17a98cd27816" Oct 03 13:46:07 crc kubenswrapper[4861]: I1003 13:46:07.652010 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/4bc19491273fb05f10b8b1261ee07db0d4b5cae179a5dad352c99ff354t6br5" Oct 03 13:46:14 crc kubenswrapper[4861]: I1003 13:46:14.565118 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-operator-5b7969687c-wz6wm"] Oct 03 13:46:14 crc kubenswrapper[4861]: E1003 13:46:14.565963 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="439d7722-c1ce-4a61-b781-499e9278b8d5" containerName="pull" Oct 03 13:46:14 crc kubenswrapper[4861]: I1003 13:46:14.565979 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="439d7722-c1ce-4a61-b781-499e9278b8d5" containerName="pull" Oct 03 13:46:14 crc kubenswrapper[4861]: E1003 13:46:14.565994 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="439d7722-c1ce-4a61-b781-499e9278b8d5" containerName="extract" Oct 03 13:46:14 crc kubenswrapper[4861]: I1003 13:46:14.566002 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="439d7722-c1ce-4a61-b781-499e9278b8d5" containerName="extract" Oct 03 13:46:14 crc kubenswrapper[4861]: E1003 13:46:14.566019 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="439d7722-c1ce-4a61-b781-499e9278b8d5" containerName="util" Oct 03 13:46:14 crc kubenswrapper[4861]: I1003 13:46:14.566026 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="439d7722-c1ce-4a61-b781-499e9278b8d5" containerName="util" Oct 03 13:46:14 crc kubenswrapper[4861]: I1003 13:46:14.566164 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="439d7722-c1ce-4a61-b781-499e9278b8d5" containerName="extract" Oct 03 13:46:14 crc kubenswrapper[4861]: I1003 13:46:14.566921 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-5b7969687c-wz6wm" Oct 03 13:46:14 crc kubenswrapper[4861]: I1003 13:46:14.571803 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-operator-dockercfg-xpnqq" Oct 03 13:46:14 crc kubenswrapper[4861]: I1003 13:46:14.648046 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-5b7969687c-wz6wm"] Oct 03 13:46:14 crc kubenswrapper[4861]: I1003 13:46:14.683048 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v6x9s\" (UniqueName: \"kubernetes.io/projected/49f1e7a1-ab0a-46b2-97c6-a069d913657d-kube-api-access-v6x9s\") pod \"openstack-operator-controller-operator-5b7969687c-wz6wm\" (UID: \"49f1e7a1-ab0a-46b2-97c6-a069d913657d\") " pod="openstack-operators/openstack-operator-controller-operator-5b7969687c-wz6wm" Oct 03 13:46:14 crc kubenswrapper[4861]: I1003 13:46:14.784524 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v6x9s\" (UniqueName: \"kubernetes.io/projected/49f1e7a1-ab0a-46b2-97c6-a069d913657d-kube-api-access-v6x9s\") pod \"openstack-operator-controller-operator-5b7969687c-wz6wm\" (UID: \"49f1e7a1-ab0a-46b2-97c6-a069d913657d\") " pod="openstack-operators/openstack-operator-controller-operator-5b7969687c-wz6wm" Oct 03 13:46:14 crc kubenswrapper[4861]: I1003 13:46:14.805803 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v6x9s\" (UniqueName: \"kubernetes.io/projected/49f1e7a1-ab0a-46b2-97c6-a069d913657d-kube-api-access-v6x9s\") pod \"openstack-operator-controller-operator-5b7969687c-wz6wm\" (UID: \"49f1e7a1-ab0a-46b2-97c6-a069d913657d\") " pod="openstack-operators/openstack-operator-controller-operator-5b7969687c-wz6wm" Oct 03 13:46:14 crc kubenswrapper[4861]: I1003 13:46:14.885510 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-5b7969687c-wz6wm" Oct 03 13:46:15 crc kubenswrapper[4861]: I1003 13:46:15.316212 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-5b7969687c-wz6wm"] Oct 03 13:46:15 crc kubenswrapper[4861]: W1003 13:46:15.321437 4861 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod49f1e7a1_ab0a_46b2_97c6_a069d913657d.slice/crio-350a207272037aa867f5980528a73ac59377da7c6738d08f91a9fbbe18fc0644 WatchSource:0}: Error finding container 350a207272037aa867f5980528a73ac59377da7c6738d08f91a9fbbe18fc0644: Status 404 returned error can't find the container with id 350a207272037aa867f5980528a73ac59377da7c6738d08f91a9fbbe18fc0644 Oct 03 13:46:15 crc kubenswrapper[4861]: I1003 13:46:15.704151 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-5b7969687c-wz6wm" event={"ID":"49f1e7a1-ab0a-46b2-97c6-a069d913657d","Type":"ContainerStarted","Data":"350a207272037aa867f5980528a73ac59377da7c6738d08f91a9fbbe18fc0644"} Oct 03 13:46:20 crc kubenswrapper[4861]: I1003 13:46:20.744594 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-5b7969687c-wz6wm" event={"ID":"49f1e7a1-ab0a-46b2-97c6-a069d913657d","Type":"ContainerStarted","Data":"f4c8286ccd82291dad4fc3111d5488c7787c2cb46db5d8dbe99435c686fe4008"} Oct 03 13:46:26 crc kubenswrapper[4861]: I1003 13:46:26.785449 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-5b7969687c-wz6wm" event={"ID":"49f1e7a1-ab0a-46b2-97c6-a069d913657d","Type":"ContainerStarted","Data":"a5ed626e359ae74723ff48e4b2ac469153386caadee476900fb45765bfeb9c40"} Oct 03 13:46:26 crc kubenswrapper[4861]: I1003 13:46:26.786140 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-operator-5b7969687c-wz6wm" Oct 03 13:46:26 crc kubenswrapper[4861]: I1003 13:46:26.787985 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-operator-5b7969687c-wz6wm" Oct 03 13:46:26 crc kubenswrapper[4861]: I1003 13:46:26.817411 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-operator-5b7969687c-wz6wm" podStartSLOduration=2.44777947 podStartE2EDuration="12.817386132s" podCreationTimestamp="2025-10-03 13:46:14 +0000 UTC" firstStartedPulling="2025-10-03 13:46:15.322975042 +0000 UTC m=+889.320960079" lastFinishedPulling="2025-10-03 13:46:25.692581694 +0000 UTC m=+899.690566741" observedRunningTime="2025-10-03 13:46:26.815226235 +0000 UTC m=+900.813211282" watchObservedRunningTime="2025-10-03 13:46:26.817386132 +0000 UTC m=+900.815371209" Oct 03 13:46:30 crc kubenswrapper[4861]: I1003 13:46:30.144789 4861 patch_prober.go:28] interesting pod/machine-config-daemon-t9slw container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 13:46:30 crc kubenswrapper[4861]: I1003 13:46:30.145070 4861 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" 
podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 13:46:59 crc kubenswrapper[4861]: I1003 13:46:59.265793 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/barbican-operator-controller-manager-6c675fb79f-vjdcg"] Oct 03 13:46:59 crc kubenswrapper[4861]: I1003 13:46:59.267528 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-6c675fb79f-vjdcg" Oct 03 13:46:59 crc kubenswrapper[4861]: W1003 13:46:59.270720 4861 reflector.go:561] object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-zkqlh": failed to list *v1.Secret: secrets "barbican-operator-controller-manager-dockercfg-zkqlh" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openstack-operators": no relationship found between node 'crc' and this object Oct 03 13:46:59 crc kubenswrapper[4861]: E1003 13:46:59.270936 4861 reflector.go:158] "Unhandled Error" err="object-\"openstack-operators\"/\"barbican-operator-controller-manager-dockercfg-zkqlh\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"barbican-operator-controller-manager-dockercfg-zkqlh\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openstack-operators\": no relationship found between node 'crc' and this object" logger="UnhandledError" Oct 03 13:46:59 crc kubenswrapper[4861]: I1003 13:46:59.296693 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-6c675fb79f-vjdcg"] Oct 03 13:46:59 crc kubenswrapper[4861]: I1003 13:46:59.301595 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/cinder-operator-controller-manager-79d68d6c85-sfz28"] Oct 03 13:46:59 crc kubenswrapper[4861]: I1003 13:46:59.302857 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-79d68d6c85-sfz28" Oct 03 13:46:59 crc kubenswrapper[4861]: I1003 13:46:59.306607 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"cinder-operator-controller-manager-dockercfg-77bsf" Oct 03 13:46:59 crc kubenswrapper[4861]: I1003 13:46:59.317105 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/designate-operator-controller-manager-75dfd9b554-4nvdf"] Oct 03 13:46:59 crc kubenswrapper[4861]: I1003 13:46:59.318033 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-75dfd9b554-4nvdf" Oct 03 13:46:59 crc kubenswrapper[4861]: I1003 13:46:59.321263 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"designate-operator-controller-manager-dockercfg-xcgcz" Oct 03 13:46:59 crc kubenswrapper[4861]: I1003 13:46:59.328390 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-79d68d6c85-sfz28"] Oct 03 13:46:59 crc kubenswrapper[4861]: I1003 13:46:59.350852 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-75dfd9b554-4nvdf"] Oct 03 13:46:59 crc kubenswrapper[4861]: I1003 13:46:59.372503 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-controller-manager-846dff85b5-qkqbk"] Oct 03 13:46:59 crc kubenswrapper[4861]: I1003 13:46:59.373382 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-846dff85b5-qkqbk" Oct 03 13:46:59 crc kubenswrapper[4861]: I1003 13:46:59.378601 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-dockercfg-tv8zx" Oct 03 13:46:59 crc kubenswrapper[4861]: I1003 13:46:59.389176 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hsrrm\" (UniqueName: \"kubernetes.io/projected/a4bdc7e9-1988-4650-8f1c-2d5d8a71b4cc-kube-api-access-hsrrm\") pod \"cinder-operator-controller-manager-79d68d6c85-sfz28\" (UID: \"a4bdc7e9-1988-4650-8f1c-2d5d8a71b4cc\") " pod="openstack-operators/cinder-operator-controller-manager-79d68d6c85-sfz28" Oct 03 13:46:59 crc kubenswrapper[4861]: I1003 13:46:59.389273 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5hz4p\" (UniqueName: \"kubernetes.io/projected/b7d80f0f-8c96-446e-a31e-90913d19d661-kube-api-access-5hz4p\") pod \"barbican-operator-controller-manager-6c675fb79f-vjdcg\" (UID: \"b7d80f0f-8c96-446e-a31e-90913d19d661\") " pod="openstack-operators/barbican-operator-controller-manager-6c675fb79f-vjdcg" Oct 03 13:46:59 crc kubenswrapper[4861]: I1003 13:46:59.389356 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/heat-operator-controller-manager-599898f689-ch9nw"] Oct 03 13:46:59 crc kubenswrapper[4861]: I1003 13:46:59.390302 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-599898f689-ch9nw" Oct 03 13:46:59 crc kubenswrapper[4861]: I1003 13:46:59.396563 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"heat-operator-controller-manager-dockercfg-27v9d" Oct 03 13:46:59 crc kubenswrapper[4861]: I1003 13:46:59.398991 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-846dff85b5-qkqbk"] Oct 03 13:46:59 crc kubenswrapper[4861]: I1003 13:46:59.410718 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-599898f689-ch9nw"] Oct 03 13:46:59 crc kubenswrapper[4861]: I1003 13:46:59.418517 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-controller-manager-6769b867d9-qcfrn"] Oct 03 13:46:59 crc kubenswrapper[4861]: I1003 13:46:59.419765 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-6769b867d9-qcfrn" Oct 03 13:46:59 crc kubenswrapper[4861]: I1003 13:46:59.421593 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-xp2s2" Oct 03 13:46:59 crc kubenswrapper[4861]: I1003 13:46:59.442865 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-6769b867d9-qcfrn"] Oct 03 13:46:59 crc kubenswrapper[4861]: I1003 13:46:59.455928 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-5fbf469cd7-8dttf"] Oct 03 13:46:59 crc kubenswrapper[4861]: I1003 13:46:59.457625 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-5fbf469cd7-8dttf" Oct 03 13:46:59 crc kubenswrapper[4861]: I1003 13:46:59.463497 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-webhook-server-cert" Oct 03 13:46:59 crc kubenswrapper[4861]: I1003 13:46:59.463773 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-x5469" Oct 03 13:46:59 crc kubenswrapper[4861]: I1003 13:46:59.475183 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ironic-operator-controller-manager-84bc9db6cc-mlqw7"] Oct 03 13:46:59 crc kubenswrapper[4861]: I1003 13:46:59.476562 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-84bc9db6cc-mlqw7" Oct 03 13:46:59 crc kubenswrapper[4861]: I1003 13:46:59.491324 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ironic-operator-controller-manager-dockercfg-tf8pc" Oct 03 13:46:59 crc kubenswrapper[4861]: I1003 13:46:59.493605 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qfmll\" (UniqueName: \"kubernetes.io/projected/5a3fecb8-5f79-4f05-9169-7d5cf9072f2c-kube-api-access-qfmll\") pod \"glance-operator-controller-manager-846dff85b5-qkqbk\" (UID: \"5a3fecb8-5f79-4f05-9169-7d5cf9072f2c\") " pod="openstack-operators/glance-operator-controller-manager-846dff85b5-qkqbk" Oct 03 13:46:59 crc kubenswrapper[4861]: I1003 13:46:59.493656 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5hz4p\" (UniqueName: \"kubernetes.io/projected/b7d80f0f-8c96-446e-a31e-90913d19d661-kube-api-access-5hz4p\") pod \"barbican-operator-controller-manager-6c675fb79f-vjdcg\" (UID: \"b7d80f0f-8c96-446e-a31e-90913d19d661\") " pod="openstack-operators/barbican-operator-controller-manager-6c675fb79f-vjdcg" Oct 03 13:46:59 crc kubenswrapper[4861]: I1003 13:46:59.493747 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hsrrm\" (UniqueName: \"kubernetes.io/projected/a4bdc7e9-1988-4650-8f1c-2d5d8a71b4cc-kube-api-access-hsrrm\") pod \"cinder-operator-controller-manager-79d68d6c85-sfz28\" (UID: \"a4bdc7e9-1988-4650-8f1c-2d5d8a71b4cc\") " pod="openstack-operators/cinder-operator-controller-manager-79d68d6c85-sfz28" Oct 03 13:46:59 crc kubenswrapper[4861]: I1003 13:46:59.493790 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kw222\" (UniqueName: \"kubernetes.io/projected/24be9f7b-3c61-4434-8863-b3b5d9e5ee2a-kube-api-access-kw222\") pod \"designate-operator-controller-manager-75dfd9b554-4nvdf\" (UID: \"24be9f7b-3c61-4434-8863-b3b5d9e5ee2a\") " pod="openstack-operators/designate-operator-controller-manager-75dfd9b554-4nvdf" Oct 03 13:46:59 crc kubenswrapper[4861]: I1003 13:46:59.493817 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sgnns\" (UniqueName: \"kubernetes.io/projected/20ad49e2-5077-43e2-8671-58457cf10432-kube-api-access-sgnns\") pod \"heat-operator-controller-manager-599898f689-ch9nw\" (UID: \"20ad49e2-5077-43e2-8671-58457cf10432\") " pod="openstack-operators/heat-operator-controller-manager-599898f689-ch9nw" Oct 03 13:46:59 crc kubenswrapper[4861]: I1003 13:46:59.508067 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7f55849f88-k4wkd"] Oct 03 13:46:59 crc kubenswrapper[4861]: I1003 13:46:59.509342 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-7f55849f88-k4wkd" Oct 03 13:46:59 crc kubenswrapper[4861]: I1003 13:46:59.521297 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-84bc9db6cc-mlqw7"] Oct 03 13:46:59 crc kubenswrapper[4861]: I1003 13:46:59.521965 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-g4qzj" Oct 03 13:46:59 crc kubenswrapper[4861]: I1003 13:46:59.532628 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-5fbf469cd7-8dttf"] Oct 03 13:46:59 crc kubenswrapper[4861]: I1003 13:46:59.546160 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7f55849f88-k4wkd"] Oct 03 13:46:59 crc kubenswrapper[4861]: I1003 13:46:59.557076 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5hz4p\" (UniqueName: \"kubernetes.io/projected/b7d80f0f-8c96-446e-a31e-90913d19d661-kube-api-access-5hz4p\") pod \"barbican-operator-controller-manager-6c675fb79f-vjdcg\" (UID: \"b7d80f0f-8c96-446e-a31e-90913d19d661\") " pod="openstack-operators/barbican-operator-controller-manager-6c675fb79f-vjdcg" Oct 03 13:46:59 crc kubenswrapper[4861]: I1003 13:46:59.561817 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/manila-operator-controller-manager-6fd6854b49-x5mwn"] Oct 03 13:46:59 crc kubenswrapper[4861]: I1003 13:46:59.562473 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hsrrm\" (UniqueName: \"kubernetes.io/projected/a4bdc7e9-1988-4650-8f1c-2d5d8a71b4cc-kube-api-access-hsrrm\") pod \"cinder-operator-controller-manager-79d68d6c85-sfz28\" (UID: \"a4bdc7e9-1988-4650-8f1c-2d5d8a71b4cc\") " pod="openstack-operators/cinder-operator-controller-manager-79d68d6c85-sfz28" Oct 03 13:46:59 crc kubenswrapper[4861]: I1003 13:46:59.571609 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-6fd6854b49-x5mwn" Oct 03 13:46:59 crc kubenswrapper[4861]: I1003 13:46:59.576206 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-6fd6854b49-x5mwn"] Oct 03 13:46:59 crc kubenswrapper[4861]: I1003 13:46:59.590637 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-controller-manager-dockercfg-mtlfw" Oct 03 13:46:59 crc kubenswrapper[4861]: I1003 13:46:59.639858 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-79d68d6c85-sfz28" Oct 03 13:46:59 crc kubenswrapper[4861]: I1003 13:46:59.673062 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zqjst\" (UniqueName: \"kubernetes.io/projected/567dc82d-835f-4cf9-805d-a3d65c82b823-kube-api-access-zqjst\") pod \"infra-operator-controller-manager-5fbf469cd7-8dttf\" (UID: \"567dc82d-835f-4cf9-805d-a3d65c82b823\") " pod="openstack-operators/infra-operator-controller-manager-5fbf469cd7-8dttf" Oct 03 13:46:59 crc kubenswrapper[4861]: I1003 13:46:59.685145 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nl67c\" (UniqueName: \"kubernetes.io/projected/c7d483ab-e555-49c8-93c9-8bb99928605a-kube-api-access-nl67c\") pod \"manila-operator-controller-manager-6fd6854b49-x5mwn\" (UID: \"c7d483ab-e555-49c8-93c9-8bb99928605a\") " pod="openstack-operators/manila-operator-controller-manager-6fd6854b49-x5mwn" Oct 03 13:46:59 crc kubenswrapper[4861]: I1003 13:46:59.685533 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qfmll\" (UniqueName: \"kubernetes.io/projected/5a3fecb8-5f79-4f05-9169-7d5cf9072f2c-kube-api-access-qfmll\") pod \"glance-operator-controller-manager-846dff85b5-qkqbk\" (UID: \"5a3fecb8-5f79-4f05-9169-7d5cf9072f2c\") " pod="openstack-operators/glance-operator-controller-manager-846dff85b5-qkqbk" Oct 03 13:46:59 crc kubenswrapper[4861]: I1003 13:46:59.685626 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qfcw4\" (UniqueName: \"kubernetes.io/projected/cb92f20f-a3f0-42b3-ae87-11e0215c62fb-kube-api-access-qfcw4\") pod \"ironic-operator-controller-manager-84bc9db6cc-mlqw7\" (UID: \"cb92f20f-a3f0-42b3-ae87-11e0215c62fb\") " pod="openstack-operators/ironic-operator-controller-manager-84bc9db6cc-mlqw7" Oct 03 13:46:59 crc kubenswrapper[4861]: I1003 13:46:59.685806 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jwxmv\" (UniqueName: \"kubernetes.io/projected/c3e0bf46-a854-4e79-a1d4-4cb5d9c5eaf1-kube-api-access-jwxmv\") pod \"horizon-operator-controller-manager-6769b867d9-qcfrn\" (UID: \"c3e0bf46-a854-4e79-a1d4-4cb5d9c5eaf1\") " pod="openstack-operators/horizon-operator-controller-manager-6769b867d9-qcfrn" Oct 03 13:46:59 crc kubenswrapper[4861]: I1003 13:46:59.685968 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lsc6f\" (UniqueName: \"kubernetes.io/projected/685fbda0-ab44-4f3c-8614-d87234d29d2f-kube-api-access-lsc6f\") pod \"keystone-operator-controller-manager-7f55849f88-k4wkd\" (UID: \"685fbda0-ab44-4f3c-8614-d87234d29d2f\") " pod="openstack-operators/keystone-operator-controller-manager-7f55849f88-k4wkd" Oct 03 13:46:59 crc kubenswrapper[4861]: I1003 13:46:59.686117 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/567dc82d-835f-4cf9-805d-a3d65c82b823-cert\") pod \"infra-operator-controller-manager-5fbf469cd7-8dttf\" (UID: \"567dc82d-835f-4cf9-805d-a3d65c82b823\") " pod="openstack-operators/infra-operator-controller-manager-5fbf469cd7-8dttf" Oct 03 13:46:59 crc kubenswrapper[4861]: I1003 13:46:59.686311 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-kw222\" (UniqueName: \"kubernetes.io/projected/24be9f7b-3c61-4434-8863-b3b5d9e5ee2a-kube-api-access-kw222\") pod \"designate-operator-controller-manager-75dfd9b554-4nvdf\" (UID: \"24be9f7b-3c61-4434-8863-b3b5d9e5ee2a\") " pod="openstack-operators/designate-operator-controller-manager-75dfd9b554-4nvdf" Oct 03 13:46:59 crc kubenswrapper[4861]: I1003 13:46:59.686459 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sgnns\" (UniqueName: \"kubernetes.io/projected/20ad49e2-5077-43e2-8671-58457cf10432-kube-api-access-sgnns\") pod \"heat-operator-controller-manager-599898f689-ch9nw\" (UID: \"20ad49e2-5077-43e2-8671-58457cf10432\") " pod="openstack-operators/heat-operator-controller-manager-599898f689-ch9nw" Oct 03 13:46:59 crc kubenswrapper[4861]: I1003 13:46:59.703451 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-5c468bf4d4-rx55k"] Oct 03 13:46:59 crc kubenswrapper[4861]: I1003 13:46:59.736500 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-5c468bf4d4-rx55k" Oct 03 13:46:59 crc kubenswrapper[4861]: I1003 13:46:59.771805 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qfmll\" (UniqueName: \"kubernetes.io/projected/5a3fecb8-5f79-4f05-9169-7d5cf9072f2c-kube-api-access-qfmll\") pod \"glance-operator-controller-manager-846dff85b5-qkqbk\" (UID: \"5a3fecb8-5f79-4f05-9169-7d5cf9072f2c\") " pod="openstack-operators/glance-operator-controller-manager-846dff85b5-qkqbk" Oct 03 13:46:59 crc kubenswrapper[4861]: I1003 13:46:59.774916 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-8lvnc" Oct 03 13:46:59 crc kubenswrapper[4861]: I1003 13:46:59.779702 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kw222\" (UniqueName: \"kubernetes.io/projected/24be9f7b-3c61-4434-8863-b3b5d9e5ee2a-kube-api-access-kw222\") pod \"designate-operator-controller-manager-75dfd9b554-4nvdf\" (UID: \"24be9f7b-3c61-4434-8863-b3b5d9e5ee2a\") " pod="openstack-operators/designate-operator-controller-manager-75dfd9b554-4nvdf" Oct 03 13:46:59 crc kubenswrapper[4861]: I1003 13:46:59.783150 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-5c468bf4d4-rx55k"] Oct 03 13:46:59 crc kubenswrapper[4861]: I1003 13:46:59.786420 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sgnns\" (UniqueName: \"kubernetes.io/projected/20ad49e2-5077-43e2-8671-58457cf10432-kube-api-access-sgnns\") pod \"heat-operator-controller-manager-599898f689-ch9nw\" (UID: \"20ad49e2-5077-43e2-8671-58457cf10432\") " pod="openstack-operators/heat-operator-controller-manager-599898f689-ch9nw" Oct 03 13:46:59 crc kubenswrapper[4861]: I1003 13:46:59.790250 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/567dc82d-835f-4cf9-805d-a3d65c82b823-cert\") pod \"infra-operator-controller-manager-5fbf469cd7-8dttf\" (UID: \"567dc82d-835f-4cf9-805d-a3d65c82b823\") " pod="openstack-operators/infra-operator-controller-manager-5fbf469cd7-8dttf" Oct 03 13:46:59 crc kubenswrapper[4861]: I1003 13:46:59.790519 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zqjst\" (UniqueName: 
\"kubernetes.io/projected/567dc82d-835f-4cf9-805d-a3d65c82b823-kube-api-access-zqjst\") pod \"infra-operator-controller-manager-5fbf469cd7-8dttf\" (UID: \"567dc82d-835f-4cf9-805d-a3d65c82b823\") " pod="openstack-operators/infra-operator-controller-manager-5fbf469cd7-8dttf" Oct 03 13:46:59 crc kubenswrapper[4861]: I1003 13:46:59.790640 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nl67c\" (UniqueName: \"kubernetes.io/projected/c7d483ab-e555-49c8-93c9-8bb99928605a-kube-api-access-nl67c\") pod \"manila-operator-controller-manager-6fd6854b49-x5mwn\" (UID: \"c7d483ab-e555-49c8-93c9-8bb99928605a\") " pod="openstack-operators/manila-operator-controller-manager-6fd6854b49-x5mwn" Oct 03 13:46:59 crc kubenswrapper[4861]: I1003 13:46:59.790717 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qfcw4\" (UniqueName: \"kubernetes.io/projected/cb92f20f-a3f0-42b3-ae87-11e0215c62fb-kube-api-access-qfcw4\") pod \"ironic-operator-controller-manager-84bc9db6cc-mlqw7\" (UID: \"cb92f20f-a3f0-42b3-ae87-11e0215c62fb\") " pod="openstack-operators/ironic-operator-controller-manager-84bc9db6cc-mlqw7" Oct 03 13:46:59 crc kubenswrapper[4861]: I1003 13:46:59.790800 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jwxmv\" (UniqueName: \"kubernetes.io/projected/c3e0bf46-a854-4e79-a1d4-4cb5d9c5eaf1-kube-api-access-jwxmv\") pod \"horizon-operator-controller-manager-6769b867d9-qcfrn\" (UID: \"c3e0bf46-a854-4e79-a1d4-4cb5d9c5eaf1\") " pod="openstack-operators/horizon-operator-controller-manager-6769b867d9-qcfrn" Oct 03 13:46:59 crc kubenswrapper[4861]: I1003 13:46:59.790923 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lsc6f\" (UniqueName: \"kubernetes.io/projected/685fbda0-ab44-4f3c-8614-d87234d29d2f-kube-api-access-lsc6f\") pod \"keystone-operator-controller-manager-7f55849f88-k4wkd\" (UID: \"685fbda0-ab44-4f3c-8614-d87234d29d2f\") " pod="openstack-operators/keystone-operator-controller-manager-7f55849f88-k4wkd" Oct 03 13:46:59 crc kubenswrapper[4861]: I1003 13:46:59.791029 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h2qgc\" (UniqueName: \"kubernetes.io/projected/9bf321cf-e938-46ed-b8b9-01418f85de45-kube-api-access-h2qgc\") pod \"mariadb-operator-controller-manager-5c468bf4d4-rx55k\" (UID: \"9bf321cf-e938-46ed-b8b9-01418f85de45\") " pod="openstack-operators/mariadb-operator-controller-manager-5c468bf4d4-rx55k" Oct 03 13:46:59 crc kubenswrapper[4861]: E1003 13:46:59.791312 4861 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Oct 03 13:46:59 crc kubenswrapper[4861]: E1003 13:46:59.791458 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/567dc82d-835f-4cf9-805d-a3d65c82b823-cert podName:567dc82d-835f-4cf9-805d-a3d65c82b823 nodeName:}" failed. No retries permitted until 2025-10-03 13:47:00.291435483 +0000 UTC m=+934.289420530 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/567dc82d-835f-4cf9-805d-a3d65c82b823-cert") pod "infra-operator-controller-manager-5fbf469cd7-8dttf" (UID: "567dc82d-835f-4cf9-805d-a3d65c82b823") : secret "infra-operator-webhook-server-cert" not found Oct 03 13:46:59 crc kubenswrapper[4861]: I1003 13:46:59.810288 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/neutron-operator-controller-manager-6574bf987d-6qwsn"] Oct 03 13:46:59 crc kubenswrapper[4861]: I1003 13:46:59.811601 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-6574bf987d-6qwsn" Oct 03 13:46:59 crc kubenswrapper[4861]: I1003 13:46:59.814294 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-controller-manager-555c7456bd-v26h5"] Oct 03 13:46:59 crc kubenswrapper[4861]: I1003 13:46:59.815620 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-555c7456bd-v26h5" Oct 03 13:46:59 crc kubenswrapper[4861]: I1003 13:46:59.826105 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-controller-manager-dockercfg-x4hm4" Oct 03 13:46:59 crc kubenswrapper[4861]: I1003 13:46:59.827035 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"neutron-operator-controller-manager-dockercfg-27x65" Oct 03 13:46:59 crc kubenswrapper[4861]: I1003 13:46:59.834958 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jwxmv\" (UniqueName: \"kubernetes.io/projected/c3e0bf46-a854-4e79-a1d4-4cb5d9c5eaf1-kube-api-access-jwxmv\") pod \"horizon-operator-controller-manager-6769b867d9-qcfrn\" (UID: \"c3e0bf46-a854-4e79-a1d4-4cb5d9c5eaf1\") " pod="openstack-operators/horizon-operator-controller-manager-6769b867d9-qcfrn" Oct 03 13:46:59 crc kubenswrapper[4861]: I1003 13:46:59.835580 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zqjst\" (UniqueName: \"kubernetes.io/projected/567dc82d-835f-4cf9-805d-a3d65c82b823-kube-api-access-zqjst\") pod \"infra-operator-controller-manager-5fbf469cd7-8dttf\" (UID: \"567dc82d-835f-4cf9-805d-a3d65c82b823\") " pod="openstack-operators/infra-operator-controller-manager-5fbf469cd7-8dttf" Oct 03 13:46:59 crc kubenswrapper[4861]: I1003 13:46:59.839764 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qfcw4\" (UniqueName: \"kubernetes.io/projected/cb92f20f-a3f0-42b3-ae87-11e0215c62fb-kube-api-access-qfcw4\") pod \"ironic-operator-controller-manager-84bc9db6cc-mlqw7\" (UID: \"cb92f20f-a3f0-42b3-ae87-11e0215c62fb\") " pod="openstack-operators/ironic-operator-controller-manager-84bc9db6cc-mlqw7" Oct 03 13:46:59 crc kubenswrapper[4861]: I1003 13:46:59.841648 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-555c7456bd-v26h5"] Oct 03 13:46:59 crc kubenswrapper[4861]: I1003 13:46:59.844927 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nl67c\" (UniqueName: \"kubernetes.io/projected/c7d483ab-e555-49c8-93c9-8bb99928605a-kube-api-access-nl67c\") pod \"manila-operator-controller-manager-6fd6854b49-x5mwn\" (UID: \"c7d483ab-e555-49c8-93c9-8bb99928605a\") " pod="openstack-operators/manila-operator-controller-manager-6fd6854b49-x5mwn" Oct 03 13:46:59 crc 
kubenswrapper[4861]: I1003 13:46:59.848362 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lsc6f\" (UniqueName: \"kubernetes.io/projected/685fbda0-ab44-4f3c-8614-d87234d29d2f-kube-api-access-lsc6f\") pod \"keystone-operator-controller-manager-7f55849f88-k4wkd\" (UID: \"685fbda0-ab44-4f3c-8614-d87234d29d2f\") " pod="openstack-operators/keystone-operator-controller-manager-7f55849f88-k4wkd" Oct 03 13:46:59 crc kubenswrapper[4861]: I1003 13:46:59.859247 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-6574bf987d-6qwsn"] Oct 03 13:46:59 crc kubenswrapper[4861]: I1003 13:46:59.892209 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vcwq9\" (UniqueName: \"kubernetes.io/projected/fdf89986-9a3f-4f07-b0ad-fb900a6e2fd8-kube-api-access-vcwq9\") pod \"nova-operator-controller-manager-555c7456bd-v26h5\" (UID: \"fdf89986-9a3f-4f07-b0ad-fb900a6e2fd8\") " pod="openstack-operators/nova-operator-controller-manager-555c7456bd-v26h5" Oct 03 13:46:59 crc kubenswrapper[4861]: I1003 13:46:59.892492 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h2qgc\" (UniqueName: \"kubernetes.io/projected/9bf321cf-e938-46ed-b8b9-01418f85de45-kube-api-access-h2qgc\") pod \"mariadb-operator-controller-manager-5c468bf4d4-rx55k\" (UID: \"9bf321cf-e938-46ed-b8b9-01418f85de45\") " pod="openstack-operators/mariadb-operator-controller-manager-5c468bf4d4-rx55k" Oct 03 13:46:59 crc kubenswrapper[4861]: I1003 13:46:59.892622 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zr6l9\" (UniqueName: \"kubernetes.io/projected/57962592-42ae-47a9-a651-7c5d0e3ffad5-kube-api-access-zr6l9\") pod \"neutron-operator-controller-manager-6574bf987d-6qwsn\" (UID: \"57962592-42ae-47a9-a651-7c5d0e3ffad5\") " pod="openstack-operators/neutron-operator-controller-manager-6574bf987d-6qwsn" Oct 03 13:46:59 crc kubenswrapper[4861]: I1003 13:46:59.910305 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/octavia-operator-controller-manager-59d6cfdf45-qlrdc"] Oct 03 13:46:59 crc kubenswrapper[4861]: I1003 13:46:59.911385 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-59d6cfdf45-qlrdc" Oct 03 13:46:59 crc kubenswrapper[4861]: I1003 13:46:59.937023 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-75dfd9b554-4nvdf" Oct 03 13:46:59 crc kubenswrapper[4861]: I1003 13:46:59.937724 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-7f55849f88-k4wkd" Oct 03 13:46:59 crc kubenswrapper[4861]: I1003 13:46:59.946479 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-6f64c4d678f4mvp"] Oct 03 13:46:59 crc kubenswrapper[4861]: I1003 13:46:59.948981 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6f64c4d678f4mvp" Oct 03 13:46:59 crc kubenswrapper[4861]: I1003 13:46:59.954894 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"octavia-operator-controller-manager-dockercfg-jh7pc" Oct 03 13:46:59 crc kubenswrapper[4861]: I1003 13:46:59.956031 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-xgt76" Oct 03 13:46:59 crc kubenswrapper[4861]: I1003 13:46:59.956302 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-webhook-server-cert" Oct 03 13:46:59 crc kubenswrapper[4861]: I1003 13:46:59.958530 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ovn-operator-controller-manager-688db7b6c7-xgq77"] Oct 03 13:46:59 crc kubenswrapper[4861]: I1003 13:46:59.960219 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-688db7b6c7-xgq77" Oct 03 13:46:59 crc kubenswrapper[4861]: I1003 13:46:59.970796 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ovn-operator-controller-manager-dockercfg-w4csx" Oct 03 13:47:00 crc kubenswrapper[4861]: I1003 13:46:59.994080 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zr6l9\" (UniqueName: \"kubernetes.io/projected/57962592-42ae-47a9-a651-7c5d0e3ffad5-kube-api-access-zr6l9\") pod \"neutron-operator-controller-manager-6574bf987d-6qwsn\" (UID: \"57962592-42ae-47a9-a651-7c5d0e3ffad5\") " pod="openstack-operators/neutron-operator-controller-manager-6574bf987d-6qwsn" Oct 03 13:47:00 crc kubenswrapper[4861]: I1003 13:46:59.994143 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/274f0db6-b0be-41da-a6ca-47160736f8e8-cert\") pod \"openstack-baremetal-operator-controller-manager-6f64c4d678f4mvp\" (UID: \"274f0db6-b0be-41da-a6ca-47160736f8e8\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6f64c4d678f4mvp" Oct 03 13:47:00 crc kubenswrapper[4861]: I1003 13:46:59.994168 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m8nqb\" (UniqueName: \"kubernetes.io/projected/5f7b92ed-6113-4c12-a8ec-25589c15dd32-kube-api-access-m8nqb\") pod \"ovn-operator-controller-manager-688db7b6c7-xgq77\" (UID: \"5f7b92ed-6113-4c12-a8ec-25589c15dd32\") " pod="openstack-operators/ovn-operator-controller-manager-688db7b6c7-xgq77" Oct 03 13:47:00 crc kubenswrapper[4861]: I1003 13:46:59.994194 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vcwq9\" (UniqueName: \"kubernetes.io/projected/fdf89986-9a3f-4f07-b0ad-fb900a6e2fd8-kube-api-access-vcwq9\") pod \"nova-operator-controller-manager-555c7456bd-v26h5\" (UID: \"fdf89986-9a3f-4f07-b0ad-fb900a6e2fd8\") " pod="openstack-operators/nova-operator-controller-manager-555c7456bd-v26h5" Oct 03 13:47:00 crc kubenswrapper[4861]: I1003 13:46:59.994359 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d2qnm\" (UniqueName: \"kubernetes.io/projected/16695342-b32b-4303-b248-d616d2ab9676-kube-api-access-d2qnm\") pod \"octavia-operator-controller-manager-59d6cfdf45-qlrdc\" 
(UID: \"16695342-b32b-4303-b248-d616d2ab9676\") " pod="openstack-operators/octavia-operator-controller-manager-59d6cfdf45-qlrdc" Oct 03 13:47:00 crc kubenswrapper[4861]: I1003 13:46:59.994399 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r6ljk\" (UniqueName: \"kubernetes.io/projected/274f0db6-b0be-41da-a6ca-47160736f8e8-kube-api-access-r6ljk\") pod \"openstack-baremetal-operator-controller-manager-6f64c4d678f4mvp\" (UID: \"274f0db6-b0be-41da-a6ca-47160736f8e8\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6f64c4d678f4mvp" Oct 03 13:47:00 crc kubenswrapper[4861]: I1003 13:47:00.003314 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-846dff85b5-qkqbk" Oct 03 13:47:00 crc kubenswrapper[4861]: I1003 13:47:00.005150 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h2qgc\" (UniqueName: \"kubernetes.io/projected/9bf321cf-e938-46ed-b8b9-01418f85de45-kube-api-access-h2qgc\") pod \"mariadb-operator-controller-manager-5c468bf4d4-rx55k\" (UID: \"9bf321cf-e938-46ed-b8b9-01418f85de45\") " pod="openstack-operators/mariadb-operator-controller-manager-5c468bf4d4-rx55k" Oct 03 13:47:00 crc kubenswrapper[4861]: I1003 13:47:00.007206 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-59d6cfdf45-qlrdc"] Oct 03 13:47:00 crc kubenswrapper[4861]: I1003 13:47:00.018217 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-599898f689-ch9nw" Oct 03 13:47:00 crc kubenswrapper[4861]: I1003 13:47:00.021300 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-6f64c4d678f4mvp"] Oct 03 13:47:00 crc kubenswrapper[4861]: I1003 13:47:00.046395 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vcwq9\" (UniqueName: \"kubernetes.io/projected/fdf89986-9a3f-4f07-b0ad-fb900a6e2fd8-kube-api-access-vcwq9\") pod \"nova-operator-controller-manager-555c7456bd-v26h5\" (UID: \"fdf89986-9a3f-4f07-b0ad-fb900a6e2fd8\") " pod="openstack-operators/nova-operator-controller-manager-555c7456bd-v26h5" Oct 03 13:47:00 crc kubenswrapper[4861]: I1003 13:47:00.047100 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-688db7b6c7-xgq77"] Oct 03 13:47:00 crc kubenswrapper[4861]: I1003 13:47:00.047219 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-6fd6854b49-x5mwn" Oct 03 13:47:00 crc kubenswrapper[4861]: I1003 13:47:00.049104 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-6769b867d9-qcfrn" Oct 03 13:47:00 crc kubenswrapper[4861]: I1003 13:47:00.050820 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zr6l9\" (UniqueName: \"kubernetes.io/projected/57962592-42ae-47a9-a651-7c5d0e3ffad5-kube-api-access-zr6l9\") pod \"neutron-operator-controller-manager-6574bf987d-6qwsn\" (UID: \"57962592-42ae-47a9-a651-7c5d0e3ffad5\") " pod="openstack-operators/neutron-operator-controller-manager-6574bf987d-6qwsn" Oct 03 13:47:00 crc kubenswrapper[4861]: I1003 13:47:00.062304 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/placement-operator-controller-manager-7d8bb7f44c-5tm2n"] Oct 03 13:47:00 crc kubenswrapper[4861]: I1003 13:47:00.063685 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-7d8bb7f44c-5tm2n" Oct 03 13:47:00 crc kubenswrapper[4861]: I1003 13:47:00.088371 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"placement-operator-controller-manager-dockercfg-rdng9" Oct 03 13:47:00 crc kubenswrapper[4861]: I1003 13:47:00.092197 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-7d8bb7f44c-5tm2n"] Oct 03 13:47:00 crc kubenswrapper[4861]: I1003 13:47:00.103485 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m8nqb\" (UniqueName: \"kubernetes.io/projected/5f7b92ed-6113-4c12-a8ec-25589c15dd32-kube-api-access-m8nqb\") pod \"ovn-operator-controller-manager-688db7b6c7-xgq77\" (UID: \"5f7b92ed-6113-4c12-a8ec-25589c15dd32\") " pod="openstack-operators/ovn-operator-controller-manager-688db7b6c7-xgq77" Oct 03 13:47:00 crc kubenswrapper[4861]: I1003 13:47:00.103547 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qqvvv\" (UniqueName: \"kubernetes.io/projected/92f8bc55-a8b1-41dd-9490-12c2280106ed-kube-api-access-qqvvv\") pod \"placement-operator-controller-manager-7d8bb7f44c-5tm2n\" (UID: \"92f8bc55-a8b1-41dd-9490-12c2280106ed\") " pod="openstack-operators/placement-operator-controller-manager-7d8bb7f44c-5tm2n" Oct 03 13:47:00 crc kubenswrapper[4861]: I1003 13:47:00.103580 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d2qnm\" (UniqueName: \"kubernetes.io/projected/16695342-b32b-4303-b248-d616d2ab9676-kube-api-access-d2qnm\") pod \"octavia-operator-controller-manager-59d6cfdf45-qlrdc\" (UID: \"16695342-b32b-4303-b248-d616d2ab9676\") " pod="openstack-operators/octavia-operator-controller-manager-59d6cfdf45-qlrdc" Oct 03 13:47:00 crc kubenswrapper[4861]: I1003 13:47:00.103612 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r6ljk\" (UniqueName: \"kubernetes.io/projected/274f0db6-b0be-41da-a6ca-47160736f8e8-kube-api-access-r6ljk\") pod \"openstack-baremetal-operator-controller-manager-6f64c4d678f4mvp\" (UID: \"274f0db6-b0be-41da-a6ca-47160736f8e8\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6f64c4d678f4mvp" Oct 03 13:47:00 crc kubenswrapper[4861]: I1003 13:47:00.103686 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/274f0db6-b0be-41da-a6ca-47160736f8e8-cert\") pod 
\"openstack-baremetal-operator-controller-manager-6f64c4d678f4mvp\" (UID: \"274f0db6-b0be-41da-a6ca-47160736f8e8\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6f64c4d678f4mvp" Oct 03 13:47:00 crc kubenswrapper[4861]: E1003 13:47:00.103816 4861 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Oct 03 13:47:00 crc kubenswrapper[4861]: E1003 13:47:00.103859 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/274f0db6-b0be-41da-a6ca-47160736f8e8-cert podName:274f0db6-b0be-41da-a6ca-47160736f8e8 nodeName:}" failed. No retries permitted until 2025-10-03 13:47:00.603845727 +0000 UTC m=+934.601830774 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/274f0db6-b0be-41da-a6ca-47160736f8e8-cert") pod "openstack-baremetal-operator-controller-manager-6f64c4d678f4mvp" (UID: "274f0db6-b0be-41da-a6ca-47160736f8e8") : secret "openstack-baremetal-operator-webhook-server-cert" not found Oct 03 13:47:00 crc kubenswrapper[4861]: I1003 13:47:00.112299 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-6859f9b676-9wwkj"] Oct 03 13:47:00 crc kubenswrapper[4861]: I1003 13:47:00.113917 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-6859f9b676-9wwkj" Oct 03 13:47:00 crc kubenswrapper[4861]: I1003 13:47:00.115031 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-84bc9db6cc-mlqw7" Oct 03 13:47:00 crc kubenswrapper[4861]: I1003 13:47:00.127562 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-5c468bf4d4-rx55k" Oct 03 13:47:00 crc kubenswrapper[4861]: I1003 13:47:00.139008 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d2qnm\" (UniqueName: \"kubernetes.io/projected/16695342-b32b-4303-b248-d616d2ab9676-kube-api-access-d2qnm\") pod \"octavia-operator-controller-manager-59d6cfdf45-qlrdc\" (UID: \"16695342-b32b-4303-b248-d616d2ab9676\") " pod="openstack-operators/octavia-operator-controller-manager-59d6cfdf45-qlrdc" Oct 03 13:47:00 crc kubenswrapper[4861]: I1003 13:47:00.139086 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-6859f9b676-9wwkj"] Oct 03 13:47:00 crc kubenswrapper[4861]: I1003 13:47:00.144587 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-z6m6z" Oct 03 13:47:00 crc kubenswrapper[4861]: I1003 13:47:00.146303 4861 patch_prober.go:28] interesting pod/machine-config-daemon-t9slw container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 13:47:00 crc kubenswrapper[4861]: I1003 13:47:00.146359 4861 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 13:47:00 crc kubenswrapper[4861]: I1003 13:47:00.146505 4861 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" Oct 03 13:47:00 crc kubenswrapper[4861]: I1003 13:47:00.147174 4861 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"d8c9692c5afc364d3518624df673f7e65a1fe92e46a015f8a19e45916a42b14c"} pod="openshift-machine-config-operator/machine-config-daemon-t9slw" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 03 13:47:00 crc kubenswrapper[4861]: I1003 13:47:00.147254 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" containerName="machine-config-daemon" containerID="cri-o://d8c9692c5afc364d3518624df673f7e65a1fe92e46a015f8a19e45916a42b14c" gracePeriod=600 Oct 03 13:47:00 crc kubenswrapper[4861]: I1003 13:47:00.150751 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m8nqb\" (UniqueName: \"kubernetes.io/projected/5f7b92ed-6113-4c12-a8ec-25589c15dd32-kube-api-access-m8nqb\") pod \"ovn-operator-controller-manager-688db7b6c7-xgq77\" (UID: \"5f7b92ed-6113-4c12-a8ec-25589c15dd32\") " pod="openstack-operators/ovn-operator-controller-manager-688db7b6c7-xgq77" Oct 03 13:47:00 crc kubenswrapper[4861]: I1003 13:47:00.157088 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-5db5cf686f-5tfpv"] Oct 03 13:47:00 crc kubenswrapper[4861]: I1003 13:47:00.158625 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-5db5cf686f-5tfpv" Oct 03 13:47:00 crc kubenswrapper[4861]: I1003 13:47:00.160107 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r6ljk\" (UniqueName: \"kubernetes.io/projected/274f0db6-b0be-41da-a6ca-47160736f8e8-kube-api-access-r6ljk\") pod \"openstack-baremetal-operator-controller-manager-6f64c4d678f4mvp\" (UID: \"274f0db6-b0be-41da-a6ca-47160736f8e8\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6f64c4d678f4mvp" Oct 03 13:47:00 crc kubenswrapper[4861]: I1003 13:47:00.190884 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"telemetry-operator-controller-manager-dockercfg-r2g7d" Oct 03 13:47:00 crc kubenswrapper[4861]: I1003 13:47:00.195481 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-6574bf987d-6qwsn" Oct 03 13:47:00 crc kubenswrapper[4861]: I1003 13:47:00.197039 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-555c7456bd-v26h5" Oct 03 13:47:00 crc kubenswrapper[4861]: I1003 13:47:00.206287 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-5db5cf686f-5tfpv"] Oct 03 13:47:00 crc kubenswrapper[4861]: I1003 13:47:00.211588 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qqvvv\" (UniqueName: \"kubernetes.io/projected/92f8bc55-a8b1-41dd-9490-12c2280106ed-kube-api-access-qqvvv\") pod \"placement-operator-controller-manager-7d8bb7f44c-5tm2n\" (UID: \"92f8bc55-a8b1-41dd-9490-12c2280106ed\") " pod="openstack-operators/placement-operator-controller-manager-7d8bb7f44c-5tm2n" Oct 03 13:47:00 crc kubenswrapper[4861]: I1003 13:47:00.211655 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nl5r4\" (UniqueName: \"kubernetes.io/projected/682b0ab4-202c-4455-872c-715e9e6c4ee1-kube-api-access-nl5r4\") pod \"telemetry-operator-controller-manager-5db5cf686f-5tfpv\" (UID: \"682b0ab4-202c-4455-872c-715e9e6c4ee1\") " pod="openstack-operators/telemetry-operator-controller-manager-5db5cf686f-5tfpv" Oct 03 13:47:00 crc kubenswrapper[4861]: I1003 13:47:00.211798 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bhjxk\" (UniqueName: \"kubernetes.io/projected/1fa5571a-b9b5-4395-aa7a-a32a670f8e92-kube-api-access-bhjxk\") pod \"swift-operator-controller-manager-6859f9b676-9wwkj\" (UID: \"1fa5571a-b9b5-4395-aa7a-a32a670f8e92\") " pod="openstack-operators/swift-operator-controller-manager-6859f9b676-9wwkj" Oct 03 13:47:00 crc kubenswrapper[4861]: I1003 13:47:00.232587 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/test-operator-controller-manager-5cd5cb47d7-stsgz"] Oct 03 13:47:00 crc kubenswrapper[4861]: I1003 13:47:00.233948 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/test-operator-controller-manager-5cd5cb47d7-stsgz" Oct 03 13:47:00 crc kubenswrapper[4861]: I1003 13:47:00.238745 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"test-operator-controller-manager-dockercfg-p629z" Oct 03 13:47:00 crc kubenswrapper[4861]: I1003 13:47:00.242438 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/watcher-operator-controller-manager-fcd7d9895-mwpqn"] Oct 03 13:47:00 crc kubenswrapper[4861]: I1003 13:47:00.246504 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-fcd7d9895-mwpqn" Oct 03 13:47:00 crc kubenswrapper[4861]: I1003 13:47:00.250953 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-controller-manager-dockercfg-gnvzj" Oct 03 13:47:00 crc kubenswrapper[4861]: I1003 13:47:00.254929 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-5cd5cb47d7-stsgz"] Oct 03 13:47:00 crc kubenswrapper[4861]: I1003 13:47:00.276006 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qqvvv\" (UniqueName: \"kubernetes.io/projected/92f8bc55-a8b1-41dd-9490-12c2280106ed-kube-api-access-qqvvv\") pod \"placement-operator-controller-manager-7d8bb7f44c-5tm2n\" (UID: \"92f8bc55-a8b1-41dd-9490-12c2280106ed\") " pod="openstack-operators/placement-operator-controller-manager-7d8bb7f44c-5tm2n" Oct 03 13:47:00 crc kubenswrapper[4861]: I1003 13:47:00.283186 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-59d6cfdf45-qlrdc" Oct 03 13:47:00 crc kubenswrapper[4861]: I1003 13:47:00.305296 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-fcd7d9895-mwpqn"] Oct 03 13:47:00 crc kubenswrapper[4861]: I1003 13:47:00.315504 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nl5r4\" (UniqueName: \"kubernetes.io/projected/682b0ab4-202c-4455-872c-715e9e6c4ee1-kube-api-access-nl5r4\") pod \"telemetry-operator-controller-manager-5db5cf686f-5tfpv\" (UID: \"682b0ab4-202c-4455-872c-715e9e6c4ee1\") " pod="openstack-operators/telemetry-operator-controller-manager-5db5cf686f-5tfpv" Oct 03 13:47:00 crc kubenswrapper[4861]: I1003 13:47:00.315540 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mbnzg\" (UniqueName: \"kubernetes.io/projected/0ab6f3ab-52bf-404a-8102-195683e803e8-kube-api-access-mbnzg\") pod \"watcher-operator-controller-manager-fcd7d9895-mwpqn\" (UID: \"0ab6f3ab-52bf-404a-8102-195683e803e8\") " pod="openstack-operators/watcher-operator-controller-manager-fcd7d9895-mwpqn" Oct 03 13:47:00 crc kubenswrapper[4861]: I1003 13:47:00.315565 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/567dc82d-835f-4cf9-805d-a3d65c82b823-cert\") pod \"infra-operator-controller-manager-5fbf469cd7-8dttf\" (UID: \"567dc82d-835f-4cf9-805d-a3d65c82b823\") " pod="openstack-operators/infra-operator-controller-manager-5fbf469cd7-8dttf" Oct 03 13:47:00 crc kubenswrapper[4861]: I1003 13:47:00.315606 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c5zx8\" (UniqueName: 
\"kubernetes.io/projected/896120d6-4995-4fd4-a238-4b34c6128326-kube-api-access-c5zx8\") pod \"test-operator-controller-manager-5cd5cb47d7-stsgz\" (UID: \"896120d6-4995-4fd4-a238-4b34c6128326\") " pod="openstack-operators/test-operator-controller-manager-5cd5cb47d7-stsgz" Oct 03 13:47:00 crc kubenswrapper[4861]: I1003 13:47:00.315635 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bhjxk\" (UniqueName: \"kubernetes.io/projected/1fa5571a-b9b5-4395-aa7a-a32a670f8e92-kube-api-access-bhjxk\") pod \"swift-operator-controller-manager-6859f9b676-9wwkj\" (UID: \"1fa5571a-b9b5-4395-aa7a-a32a670f8e92\") " pod="openstack-operators/swift-operator-controller-manager-6859f9b676-9wwkj" Oct 03 13:47:00 crc kubenswrapper[4861]: E1003 13:47:00.315910 4861 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Oct 03 13:47:00 crc kubenswrapper[4861]: E1003 13:47:00.315957 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/567dc82d-835f-4cf9-805d-a3d65c82b823-cert podName:567dc82d-835f-4cf9-805d-a3d65c82b823 nodeName:}" failed. No retries permitted until 2025-10-03 13:47:01.31593291 +0000 UTC m=+935.313917957 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/567dc82d-835f-4cf9-805d-a3d65c82b823-cert") pod "infra-operator-controller-manager-5fbf469cd7-8dttf" (UID: "567dc82d-835f-4cf9-805d-a3d65c82b823") : secret "infra-operator-webhook-server-cert" not found Oct 03 13:47:00 crc kubenswrapper[4861]: I1003 13:47:00.353055 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bhjxk\" (UniqueName: \"kubernetes.io/projected/1fa5571a-b9b5-4395-aa7a-a32a670f8e92-kube-api-access-bhjxk\") pod \"swift-operator-controller-manager-6859f9b676-9wwkj\" (UID: \"1fa5571a-b9b5-4395-aa7a-a32a670f8e92\") " pod="openstack-operators/swift-operator-controller-manager-6859f9b676-9wwkj" Oct 03 13:47:00 crc kubenswrapper[4861]: I1003 13:47:00.364767 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-688db7b6c7-xgq77" Oct 03 13:47:00 crc kubenswrapper[4861]: I1003 13:47:00.380960 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nl5r4\" (UniqueName: \"kubernetes.io/projected/682b0ab4-202c-4455-872c-715e9e6c4ee1-kube-api-access-nl5r4\") pod \"telemetry-operator-controller-manager-5db5cf686f-5tfpv\" (UID: \"682b0ab4-202c-4455-872c-715e9e6c4ee1\") " pod="openstack-operators/telemetry-operator-controller-manager-5db5cf686f-5tfpv" Oct 03 13:47:00 crc kubenswrapper[4861]: I1003 13:47:00.399623 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-7d8bb7f44c-5tm2n" Oct 03 13:47:00 crc kubenswrapper[4861]: I1003 13:47:00.425676 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mbnzg\" (UniqueName: \"kubernetes.io/projected/0ab6f3ab-52bf-404a-8102-195683e803e8-kube-api-access-mbnzg\") pod \"watcher-operator-controller-manager-fcd7d9895-mwpqn\" (UID: \"0ab6f3ab-52bf-404a-8102-195683e803e8\") " pod="openstack-operators/watcher-operator-controller-manager-fcd7d9895-mwpqn" Oct 03 13:47:00 crc kubenswrapper[4861]: I1003 13:47:00.425770 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c5zx8\" (UniqueName: \"kubernetes.io/projected/896120d6-4995-4fd4-a238-4b34c6128326-kube-api-access-c5zx8\") pod \"test-operator-controller-manager-5cd5cb47d7-stsgz\" (UID: \"896120d6-4995-4fd4-a238-4b34c6128326\") " pod="openstack-operators/test-operator-controller-manager-5cd5cb47d7-stsgz" Oct 03 13:47:00 crc kubenswrapper[4861]: I1003 13:47:00.491603 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mbnzg\" (UniqueName: \"kubernetes.io/projected/0ab6f3ab-52bf-404a-8102-195683e803e8-kube-api-access-mbnzg\") pod \"watcher-operator-controller-manager-fcd7d9895-mwpqn\" (UID: \"0ab6f3ab-52bf-404a-8102-195683e803e8\") " pod="openstack-operators/watcher-operator-controller-manager-fcd7d9895-mwpqn" Oct 03 13:47:00 crc kubenswrapper[4861]: I1003 13:47:00.492191 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-6859f9b676-9wwkj" Oct 03 13:47:00 crc kubenswrapper[4861]: I1003 13:47:00.499540 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c5zx8\" (UniqueName: \"kubernetes.io/projected/896120d6-4995-4fd4-a238-4b34c6128326-kube-api-access-c5zx8\") pod \"test-operator-controller-manager-5cd5cb47d7-stsgz\" (UID: \"896120d6-4995-4fd4-a238-4b34c6128326\") " pod="openstack-operators/test-operator-controller-manager-5cd5cb47d7-stsgz" Oct 03 13:47:00 crc kubenswrapper[4861]: I1003 13:47:00.524801 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-zkqlh" Oct 03 13:47:00 crc kubenswrapper[4861]: I1003 13:47:00.534043 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-5db5cf686f-5tfpv" Oct 03 13:47:00 crc kubenswrapper[4861]: I1003 13:47:00.535915 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-6c675fb79f-vjdcg" Oct 03 13:47:00 crc kubenswrapper[4861]: I1003 13:47:00.621348 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/test-operator-controller-manager-5cd5cb47d7-stsgz" Oct 03 13:47:00 crc kubenswrapper[4861]: I1003 13:47:00.630299 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/274f0db6-b0be-41da-a6ca-47160736f8e8-cert\") pod \"openstack-baremetal-operator-controller-manager-6f64c4d678f4mvp\" (UID: \"274f0db6-b0be-41da-a6ca-47160736f8e8\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6f64c4d678f4mvp" Oct 03 13:47:00 crc kubenswrapper[4861]: E1003 13:47:00.630460 4861 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Oct 03 13:47:00 crc kubenswrapper[4861]: E1003 13:47:00.630513 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/274f0db6-b0be-41da-a6ca-47160736f8e8-cert podName:274f0db6-b0be-41da-a6ca-47160736f8e8 nodeName:}" failed. No retries permitted until 2025-10-03 13:47:01.630498549 +0000 UTC m=+935.628483596 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/274f0db6-b0be-41da-a6ca-47160736f8e8-cert") pod "openstack-baremetal-operator-controller-manager-6f64c4d678f4mvp" (UID: "274f0db6-b0be-41da-a6ca-47160736f8e8") : secret "openstack-baremetal-operator-webhook-server-cert" not found Oct 03 13:47:00 crc kubenswrapper[4861]: I1003 13:47:00.672619 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-fcd7d9895-mwpqn" Oct 03 13:47:00 crc kubenswrapper[4861]: I1003 13:47:00.679192 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-manager-8fd589ff7-d8tns"] Oct 03 13:47:00 crc kubenswrapper[4861]: I1003 13:47:00.680497 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-8fd589ff7-d8tns" Oct 03 13:47:00 crc kubenswrapper[4861]: I1003 13:47:00.689889 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Oct 03 13:47:00 crc kubenswrapper[4861]: I1003 13:47:00.690678 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-bwn4s" Oct 03 13:47:00 crc kubenswrapper[4861]: I1003 13:47:00.735870 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8qrhv\" (UniqueName: \"kubernetes.io/projected/ea16cb5e-459c-4ad2-9579-17bd88783158-kube-api-access-8qrhv\") pod \"openstack-operator-controller-manager-8fd589ff7-d8tns\" (UID: \"ea16cb5e-459c-4ad2-9579-17bd88783158\") " pod="openstack-operators/openstack-operator-controller-manager-8fd589ff7-d8tns" Oct 03 13:47:00 crc kubenswrapper[4861]: I1003 13:47:00.738666 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ea16cb5e-459c-4ad2-9579-17bd88783158-cert\") pod \"openstack-operator-controller-manager-8fd589ff7-d8tns\" (UID: \"ea16cb5e-459c-4ad2-9579-17bd88783158\") " pod="openstack-operators/openstack-operator-controller-manager-8fd589ff7-d8tns" Oct 03 13:47:00 crc kubenswrapper[4861]: I1003 13:47:00.773310 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-79d68d6c85-sfz28"] Oct 03 13:47:00 crc kubenswrapper[4861]: I1003 13:47:00.773351 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-8fd589ff7-d8tns"] Oct 03 13:47:00 crc kubenswrapper[4861]: I1003 13:47:00.773363 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-x75vq"] Oct 03 13:47:00 crc kubenswrapper[4861]: I1003 13:47:00.777184 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-x75vq"] Oct 03 13:47:00 crc kubenswrapper[4861]: I1003 13:47:00.777558 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-x75vq" Oct 03 13:47:00 crc kubenswrapper[4861]: I1003 13:47:00.789130 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-controller-manager-dockercfg-2llkp" Oct 03 13:47:00 crc kubenswrapper[4861]: I1003 13:47:00.848029 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ea16cb5e-459c-4ad2-9579-17bd88783158-cert\") pod \"openstack-operator-controller-manager-8fd589ff7-d8tns\" (UID: \"ea16cb5e-459c-4ad2-9579-17bd88783158\") " pod="openstack-operators/openstack-operator-controller-manager-8fd589ff7-d8tns" Oct 03 13:47:00 crc kubenswrapper[4861]: I1003 13:47:00.848109 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x69n8\" (UniqueName: \"kubernetes.io/projected/68cf4faf-6f3d-4dfe-9a86-22a803baf77c-kube-api-access-x69n8\") pod \"rabbitmq-cluster-operator-manager-5f97d8c699-x75vq\" (UID: \"68cf4faf-6f3d-4dfe-9a86-22a803baf77c\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-x75vq" Oct 03 13:47:00 crc kubenswrapper[4861]: I1003 13:47:00.848274 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8qrhv\" (UniqueName: \"kubernetes.io/projected/ea16cb5e-459c-4ad2-9579-17bd88783158-kube-api-access-8qrhv\") pod \"openstack-operator-controller-manager-8fd589ff7-d8tns\" (UID: \"ea16cb5e-459c-4ad2-9579-17bd88783158\") " pod="openstack-operators/openstack-operator-controller-manager-8fd589ff7-d8tns" Oct 03 13:47:00 crc kubenswrapper[4861]: I1003 13:47:00.871947 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ea16cb5e-459c-4ad2-9579-17bd88783158-cert\") pod \"openstack-operator-controller-manager-8fd589ff7-d8tns\" (UID: \"ea16cb5e-459c-4ad2-9579-17bd88783158\") " pod="openstack-operators/openstack-operator-controller-manager-8fd589ff7-d8tns" Oct 03 13:47:00 crc kubenswrapper[4861]: I1003 13:47:00.881656 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8qrhv\" (UniqueName: \"kubernetes.io/projected/ea16cb5e-459c-4ad2-9579-17bd88783158-kube-api-access-8qrhv\") pod \"openstack-operator-controller-manager-8fd589ff7-d8tns\" (UID: \"ea16cb5e-459c-4ad2-9579-17bd88783158\") " pod="openstack-operators/openstack-operator-controller-manager-8fd589ff7-d8tns" Oct 03 13:47:00 crc kubenswrapper[4861]: I1003 13:47:00.949110 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x69n8\" (UniqueName: \"kubernetes.io/projected/68cf4faf-6f3d-4dfe-9a86-22a803baf77c-kube-api-access-x69n8\") pod \"rabbitmq-cluster-operator-manager-5f97d8c699-x75vq\" (UID: \"68cf4faf-6f3d-4dfe-9a86-22a803baf77c\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-x75vq" Oct 03 13:47:00 crc kubenswrapper[4861]: I1003 13:47:00.972664 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x69n8\" (UniqueName: \"kubernetes.io/projected/68cf4faf-6f3d-4dfe-9a86-22a803baf77c-kube-api-access-x69n8\") pod \"rabbitmq-cluster-operator-manager-5f97d8c699-x75vq\" (UID: \"68cf4faf-6f3d-4dfe-9a86-22a803baf77c\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-x75vq" Oct 03 13:47:01 crc kubenswrapper[4861]: I1003 13:47:01.007485 4861 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-79d68d6c85-sfz28" event={"ID":"a4bdc7e9-1988-4650-8f1c-2d5d8a71b4cc","Type":"ContainerStarted","Data":"5cbf1b8643597888975fa20f22b4646be994316accc62a2fb60398ed800ccafa"} Oct 03 13:47:01 crc kubenswrapper[4861]: I1003 13:47:01.010179 4861 generic.go:334] "Generic (PLEG): container finished" podID="d8335d3f-417e-4114-b306-a3d8f6c31348" containerID="d8c9692c5afc364d3518624df673f7e65a1fe92e46a015f8a19e45916a42b14c" exitCode=0 Oct 03 13:47:01 crc kubenswrapper[4861]: I1003 13:47:01.010209 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" event={"ID":"d8335d3f-417e-4114-b306-a3d8f6c31348","Type":"ContainerDied","Data":"d8c9692c5afc364d3518624df673f7e65a1fe92e46a015f8a19e45916a42b14c"} Oct 03 13:47:01 crc kubenswrapper[4861]: I1003 13:47:01.010270 4861 scope.go:117] "RemoveContainer" containerID="d80b8d3116622b7c143b4b78ef1cc92a62fa6958e50f67859a9a0dce0c925ac5" Oct 03 13:47:01 crc kubenswrapper[4861]: I1003 13:47:01.116179 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-8fd589ff7-d8tns" Oct 03 13:47:01 crc kubenswrapper[4861]: I1003 13:47:01.132188 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-x75vq" Oct 03 13:47:01 crc kubenswrapper[4861]: I1003 13:47:01.157817 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-599898f689-ch9nw"] Oct 03 13:47:01 crc kubenswrapper[4861]: I1003 13:47:01.181558 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-846dff85b5-qkqbk"] Oct 03 13:47:01 crc kubenswrapper[4861]: I1003 13:47:01.194988 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7f55849f88-k4wkd"] Oct 03 13:47:01 crc kubenswrapper[4861]: I1003 13:47:01.346486 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-6769b867d9-qcfrn"] Oct 03 13:47:01 crc kubenswrapper[4861]: I1003 13:47:01.365893 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/567dc82d-835f-4cf9-805d-a3d65c82b823-cert\") pod \"infra-operator-controller-manager-5fbf469cd7-8dttf\" (UID: \"567dc82d-835f-4cf9-805d-a3d65c82b823\") " pod="openstack-operators/infra-operator-controller-manager-5fbf469cd7-8dttf" Oct 03 13:47:01 crc kubenswrapper[4861]: I1003 13:47:01.378433 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-75dfd9b554-4nvdf"] Oct 03 13:47:01 crc kubenswrapper[4861]: I1003 13:47:01.388859 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/567dc82d-835f-4cf9-805d-a3d65c82b823-cert\") pod \"infra-operator-controller-manager-5fbf469cd7-8dttf\" (UID: \"567dc82d-835f-4cf9-805d-a3d65c82b823\") " pod="openstack-operators/infra-operator-controller-manager-5fbf469cd7-8dttf" Oct 03 13:47:01 crc kubenswrapper[4861]: I1003 13:47:01.456757 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-6fd6854b49-x5mwn"] Oct 03 13:47:01 crc kubenswrapper[4861]: W1003 13:47:01.459066 4861 
manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc3e0bf46_a854_4e79_a1d4_4cb5d9c5eaf1.slice/crio-e21c5c9a5d32bd161cad2759a6921a97cdfbfd4eee0a400c9a761e9b725a7250 WatchSource:0}: Error finding container e21c5c9a5d32bd161cad2759a6921a97cdfbfd4eee0a400c9a761e9b725a7250: Status 404 returned error can't find the container with id e21c5c9a5d32bd161cad2759a6921a97cdfbfd4eee0a400c9a761e9b725a7250 Oct 03 13:47:01 crc kubenswrapper[4861]: I1003 13:47:01.503380 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-6574bf987d-6qwsn"] Oct 03 13:47:01 crc kubenswrapper[4861]: I1003 13:47:01.593557 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-5fbf469cd7-8dttf" Oct 03 13:47:01 crc kubenswrapper[4861]: I1003 13:47:01.602553 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-84bc9db6cc-mlqw7"] Oct 03 13:47:01 crc kubenswrapper[4861]: W1003 13:47:01.602795 4861 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod57962592_42ae_47a9_a651_7c5d0e3ffad5.slice/crio-6d639408c0267a5e5c2ba8ea1261948e2ae016e38db3414536bc3361c44d37d5 WatchSource:0}: Error finding container 6d639408c0267a5e5c2ba8ea1261948e2ae016e38db3414536bc3361c44d37d5: Status 404 returned error can't find the container with id 6d639408c0267a5e5c2ba8ea1261948e2ae016e38db3414536bc3361c44d37d5 Oct 03 13:47:01 crc kubenswrapper[4861]: I1003 13:47:01.684580 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/274f0db6-b0be-41da-a6ca-47160736f8e8-cert\") pod \"openstack-baremetal-operator-controller-manager-6f64c4d678f4mvp\" (UID: \"274f0db6-b0be-41da-a6ca-47160736f8e8\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6f64c4d678f4mvp" Oct 03 13:47:01 crc kubenswrapper[4861]: I1003 13:47:01.698034 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/274f0db6-b0be-41da-a6ca-47160736f8e8-cert\") pod \"openstack-baremetal-operator-controller-manager-6f64c4d678f4mvp\" (UID: \"274f0db6-b0be-41da-a6ca-47160736f8e8\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6f64c4d678f4mvp" Oct 03 13:47:01 crc kubenswrapper[4861]: I1003 13:47:01.726685 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-555c7456bd-v26h5"] Oct 03 13:47:01 crc kubenswrapper[4861]: I1003 13:47:01.735986 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-5c468bf4d4-rx55k"] Oct 03 13:47:01 crc kubenswrapper[4861]: I1003 13:47:01.791286 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-59d6cfdf45-qlrdc"] Oct 03 13:47:01 crc kubenswrapper[4861]: I1003 13:47:01.847351 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6f64c4d678f4mvp" Oct 03 13:47:01 crc kubenswrapper[4861]: I1003 13:47:01.923343 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-5cd5cb47d7-stsgz"] Oct 03 13:47:01 crc kubenswrapper[4861]: W1003 13:47:01.946193 4861 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod896120d6_4995_4fd4_a238_4b34c6128326.slice/crio-e6e3c277ebcd47eb65795ad7849c7f92bf29cd0418a1e1185667d1a7eaf761fe WatchSource:0}: Error finding container e6e3c277ebcd47eb65795ad7849c7f92bf29cd0418a1e1185667d1a7eaf761fe: Status 404 returned error can't find the container with id e6e3c277ebcd47eb65795ad7849c7f92bf29cd0418a1e1185667d1a7eaf761fe Oct 03 13:47:02 crc kubenswrapper[4861]: I1003 13:47:02.024324 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-555c7456bd-v26h5" event={"ID":"fdf89986-9a3f-4f07-b0ad-fb900a6e2fd8","Type":"ContainerStarted","Data":"bd1622c67f759e5db71bce950d9cd20b656267cbd46d857c99bb233fc0165a10"} Oct 03 13:47:02 crc kubenswrapper[4861]: I1003 13:47:02.026922 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-59d6cfdf45-qlrdc" event={"ID":"16695342-b32b-4303-b248-d616d2ab9676","Type":"ContainerStarted","Data":"5ece641b99ad24e8eb5e763d32ee22a44a345a941a7943566c06ccde9a558714"} Oct 03 13:47:02 crc kubenswrapper[4861]: I1003 13:47:02.034302 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-846dff85b5-qkqbk" event={"ID":"5a3fecb8-5f79-4f05-9169-7d5cf9072f2c","Type":"ContainerStarted","Data":"9fdfd1ce2b775ce77b3975b4e216d921b2f4e08abb73b554855747c809b7b75b"} Oct 03 13:47:02 crc kubenswrapper[4861]: I1003 13:47:02.039273 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5cd5cb47d7-stsgz" event={"ID":"896120d6-4995-4fd4-a238-4b34c6128326","Type":"ContainerStarted","Data":"e6e3c277ebcd47eb65795ad7849c7f92bf29cd0418a1e1185667d1a7eaf761fe"} Oct 03 13:47:02 crc kubenswrapper[4861]: I1003 13:47:02.073359 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" event={"ID":"d8335d3f-417e-4114-b306-a3d8f6c31348","Type":"ContainerStarted","Data":"7c374cec0027a71985e4c5ed0abe80567ca6f2e53b91f0c5eb7af2198510c7d3"} Oct 03 13:47:02 crc kubenswrapper[4861]: I1003 13:47:02.087493 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-5c468bf4d4-rx55k" event={"ID":"9bf321cf-e938-46ed-b8b9-01418f85de45","Type":"ContainerStarted","Data":"073dc1d02a76cd81a9a110184d3e5a2610c7daf014ae73b24b95d4e79cbf309c"} Oct 03 13:47:02 crc kubenswrapper[4861]: I1003 13:47:02.106726 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-6574bf987d-6qwsn" event={"ID":"57962592-42ae-47a9-a651-7c5d0e3ffad5","Type":"ContainerStarted","Data":"6d639408c0267a5e5c2ba8ea1261948e2ae016e38db3414536bc3361c44d37d5"} Oct 03 13:47:02 crc kubenswrapper[4861]: I1003 13:47:02.109742 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-6769b867d9-qcfrn" 
event={"ID":"c3e0bf46-a854-4e79-a1d4-4cb5d9c5eaf1","Type":"ContainerStarted","Data":"e21c5c9a5d32bd161cad2759a6921a97cdfbfd4eee0a400c9a761e9b725a7250"} Oct 03 13:47:02 crc kubenswrapper[4861]: I1003 13:47:02.115191 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-6fd6854b49-x5mwn" event={"ID":"c7d483ab-e555-49c8-93c9-8bb99928605a","Type":"ContainerStarted","Data":"790a6b00ba9eaabef9e8f4ec02173e27b0294fb4bd37766d692e20343d35762a"} Oct 03 13:47:02 crc kubenswrapper[4861]: I1003 13:47:02.126244 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-688db7b6c7-xgq77"] Oct 03 13:47:02 crc kubenswrapper[4861]: I1003 13:47:02.132386 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7f55849f88-k4wkd" event={"ID":"685fbda0-ab44-4f3c-8614-d87234d29d2f","Type":"ContainerStarted","Data":"7dfcbfa3f4056b4861fe083f679e1fbe20d88c298e9dfeeaf0cebdf19e951d79"} Oct 03 13:47:02 crc kubenswrapper[4861]: I1003 13:47:02.134942 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-8fd589ff7-d8tns"] Oct 03 13:47:02 crc kubenswrapper[4861]: I1003 13:47:02.142141 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-75dfd9b554-4nvdf" event={"ID":"24be9f7b-3c61-4434-8863-b3b5d9e5ee2a","Type":"ContainerStarted","Data":"5fbde5485a18778ce2d6f0018f6cc7ad4096a439f13f4521a2703139c376c4f9"} Oct 03 13:47:02 crc kubenswrapper[4861]: I1003 13:47:02.150563 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-599898f689-ch9nw" event={"ID":"20ad49e2-5077-43e2-8671-58457cf10432","Type":"ContainerStarted","Data":"6aa446ae2cd45b7091836fcf71a1172a89db3a9aecf042525744f37caca836fd"} Oct 03 13:47:02 crc kubenswrapper[4861]: I1003 13:47:02.154088 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-84bc9db6cc-mlqw7" event={"ID":"cb92f20f-a3f0-42b3-ae87-11e0215c62fb","Type":"ContainerStarted","Data":"4c729ba2bd2240fccf086e14e8e526183ad3d726a460e1135835e5849b3a9705"} Oct 03 13:47:02 crc kubenswrapper[4861]: I1003 13:47:02.165392 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-5fbf469cd7-8dttf"] Oct 03 13:47:02 crc kubenswrapper[4861]: W1003 13:47:02.166627 4861 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5f7b92ed_6113_4c12_a8ec_25589c15dd32.slice/crio-a798d2bb609404f17b89123abd47b1acb00cc496fc65c1c89cfc145dce36efbe WatchSource:0}: Error finding container a798d2bb609404f17b89123abd47b1acb00cc496fc65c1c89cfc145dce36efbe: Status 404 returned error can't find the container with id a798d2bb609404f17b89123abd47b1acb00cc496fc65c1c89cfc145dce36efbe Oct 03 13:47:02 crc kubenswrapper[4861]: W1003 13:47:02.181880 4861 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod567dc82d_835f_4cf9_805d_a3d65c82b823.slice/crio-53d857da2c4e530269fde68cf55c025888068e668ad4a281dace52efd1475779 WatchSource:0}: Error finding container 53d857da2c4e530269fde68cf55c025888068e668ad4a281dace52efd1475779: Status 404 returned error can't find the container with id 
53d857da2c4e530269fde68cf55c025888068e668ad4a281dace52efd1475779 Oct 03 13:47:02 crc kubenswrapper[4861]: I1003 13:47:02.282036 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-6f64c4d678f4mvp"] Oct 03 13:47:02 crc kubenswrapper[4861]: I1003 13:47:02.319055 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-5db5cf686f-5tfpv"] Oct 03 13:47:02 crc kubenswrapper[4861]: I1003 13:47:02.353411 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-x75vq"] Oct 03 13:47:02 crc kubenswrapper[4861]: I1003 13:47:02.360506 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-6c675fb79f-vjdcg"] Oct 03 13:47:02 crc kubenswrapper[4861]: E1003 13:47:02.364134 4861 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:operator,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2,Command:[/manager],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:9782,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-x69n8,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cluster-operator-manager-5f97d8c699-x75vq_openstack-operators(68cf4faf-6f3d-4dfe-9a86-22a803baf77c): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Oct 03 13:47:02 crc kubenswrapper[4861]: E1003 13:47:02.364306 4861 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/telemetry-operator@sha256:8f5eee2eb7b77432ef1a88ed693ff981514359dfc808581f393bcef252de5cfa,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 
--leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-nl5r4,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-5db5cf686f-5tfpv_openstack-operators(682b0ab4-202c-4455-872c-715e9e6c4ee1): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Oct 03 13:47:02 crc kubenswrapper[4861]: E1003 13:47:02.365313 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-x75vq" podUID="68cf4faf-6f3d-4dfe-9a86-22a803baf77c" Oct 03 13:47:02 crc kubenswrapper[4861]: W1003 13:47:02.366863 4861 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb7d80f0f_8c96_446e_a31e_90913d19d661.slice/crio-15ac2ae38540333646c2d13c43dabf8c8e5c9fe3983052b19752742ee3688d92 WatchSource:0}: Error finding container 15ac2ae38540333646c2d13c43dabf8c8e5c9fe3983052b19752742ee3688d92: Status 404 returned error can't find the container with id 15ac2ae38540333646c2d13c43dabf8c8e5c9fe3983052b19752742ee3688d92 Oct 03 13:47:02 crc kubenswrapper[4861]: I1003 13:47:02.370408 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-fcd7d9895-mwpqn"] Oct 03 13:47:02 crc kubenswrapper[4861]: E1003 13:47:02.372508 4861 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/watcher-operator@sha256:018151bd5ff830ec03c6b8e3d53cfb9456ca6e1e34793bdd4f7edd39a0146fa6,Command:[/manager],Args:[--health-probe-bind-address=:8081 
--metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-mbnzg,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-fcd7d9895-mwpqn_openstack-operators(0ab6f3ab-52bf-404a-8102-195683e803e8): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Oct 03 13:47:02 crc kubenswrapper[4861]: E1003 13:47:02.372651 4861 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/barbican-operator@sha256:91e11b31f0c969125d9883b1f765e7c99a62f639b11fab568dec82b38f8cfe74,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-5hz4p,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod barbican-operator-controller-manager-6c675fb79f-vjdcg_openstack-operators(b7d80f0f-8c96-446e-a31e-90913d19d661): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Oct 03 13:47:02 crc kubenswrapper[4861]: I1003 13:47:02.375799 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-7d8bb7f44c-5tm2n"] Oct 03 13:47:02 crc kubenswrapper[4861]: I1003 13:47:02.379888 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-6859f9b676-9wwkj"] Oct 03 13:47:02 crc kubenswrapper[4861]: W1003 13:47:02.384107 4861 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod92f8bc55_a8b1_41dd_9490_12c2280106ed.slice/crio-b79ca9dbed559a49f80a5f9a158c0799e99911dbd1b19e27a64b62b67bfcb570 WatchSource:0}: Error finding container b79ca9dbed559a49f80a5f9a158c0799e99911dbd1b19e27a64b62b67bfcb570: Status 404 returned error can't find the container with id b79ca9dbed559a49f80a5f9a158c0799e99911dbd1b19e27a64b62b67bfcb570 Oct 03 13:47:02 crc kubenswrapper[4861]: W1003 13:47:02.386741 4861 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1fa5571a_b9b5_4395_aa7a_a32a670f8e92.slice/crio-beae944c9f58d2e5f791ad70d3ad13b9a4b91b1fa40a0d1d385598e41ebc6553 WatchSource:0}: Error finding container beae944c9f58d2e5f791ad70d3ad13b9a4b91b1fa40a0d1d385598e41ebc6553: Status 404 returned error can't find the container with id beae944c9f58d2e5f791ad70d3ad13b9a4b91b1fa40a0d1d385598e41ebc6553 Oct 03 13:47:02 crc kubenswrapper[4861]: E1003 13:47:02.387561 4861 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/placement-operator@sha256:725da67b3f9cf2758564e0111928cdd570c0f6f1ca34775f159bbe94deb82548,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} 
BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-qqvvv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-operator-controller-manager-7d8bb7f44c-5tm2n_openstack-operators(92f8bc55-a8b1-41dd-9490-12c2280106ed): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Oct 03 13:47:02 crc kubenswrapper[4861]: E1003 13:47:02.389024 4861 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/swift-operator@sha256:637bb7b9ac308bc1e323391a3593b824f688090a856c83385814c17a571b1eed,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-bhjxk,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod swift-operator-controller-manager-6859f9b676-9wwkj_openstack-operators(1fa5571a-b9b5-4395-aa7a-a32a670f8e92): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Oct 03 13:47:02 crc kubenswrapper[4861]: E1003 13:47:02.743929 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/watcher-operator-controller-manager-fcd7d9895-mwpqn" podUID="0ab6f3ab-52bf-404a-8102-195683e803e8" Oct 03 13:47:02 crc kubenswrapper[4861]: E1003 13:47:02.792297 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/telemetry-operator-controller-manager-5db5cf686f-5tfpv" podUID="682b0ab4-202c-4455-872c-715e9e6c4ee1" Oct 03 13:47:02 crc kubenswrapper[4861]: E1003 13:47:02.798189 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/placement-operator-controller-manager-7d8bb7f44c-5tm2n" podUID="92f8bc55-a8b1-41dd-9490-12c2280106ed" Oct 03 13:47:02 crc kubenswrapper[4861]: E1003 13:47:02.800989 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/swift-operator-controller-manager-6859f9b676-9wwkj" podUID="1fa5571a-b9b5-4395-aa7a-a32a670f8e92" Oct 03 13:47:02 crc kubenswrapper[4861]: E1003 13:47:02.811282 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/barbican-operator-controller-manager-6c675fb79f-vjdcg" podUID="b7d80f0f-8c96-446e-a31e-90913d19d661" Oct 03 13:47:03 crc kubenswrapper[4861]: I1003 13:47:03.160979 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-6c675fb79f-vjdcg" event={"ID":"b7d80f0f-8c96-446e-a31e-90913d19d661","Type":"ContainerStarted","Data":"80b5049888b69c5558983ba5721107782a247ae6ea2de60a6671c9c0cfd1bc1f"} Oct 03 13:47:03 crc kubenswrapper[4861]: I1003 13:47:03.161024 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-6c675fb79f-vjdcg" event={"ID":"b7d80f0f-8c96-446e-a31e-90913d19d661","Type":"ContainerStarted","Data":"15ac2ae38540333646c2d13c43dabf8c8e5c9fe3983052b19752742ee3688d92"} Oct 03 13:47:03 crc kubenswrapper[4861]: E1003 13:47:03.163150 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: 
\"Back-off pulling image \\\"quay.io/openstack-k8s-operators/barbican-operator@sha256:91e11b31f0c969125d9883b1f765e7c99a62f639b11fab568dec82b38f8cfe74\\\"\"" pod="openstack-operators/barbican-operator-controller-manager-6c675fb79f-vjdcg" podUID="b7d80f0f-8c96-446e-a31e-90913d19d661" Oct 03 13:47:03 crc kubenswrapper[4861]: I1003 13:47:03.166845 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-7d8bb7f44c-5tm2n" event={"ID":"92f8bc55-a8b1-41dd-9490-12c2280106ed","Type":"ContainerStarted","Data":"8857f3e8ad4aef2fc01c4a8f9732f9e6b7ad0c58c81a28ecc8b3aeb649373425"} Oct 03 13:47:03 crc kubenswrapper[4861]: I1003 13:47:03.166884 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-7d8bb7f44c-5tm2n" event={"ID":"92f8bc55-a8b1-41dd-9490-12c2280106ed","Type":"ContainerStarted","Data":"b79ca9dbed559a49f80a5f9a158c0799e99911dbd1b19e27a64b62b67bfcb570"} Oct 03 13:47:03 crc kubenswrapper[4861]: E1003 13:47:03.168588 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/placement-operator@sha256:725da67b3f9cf2758564e0111928cdd570c0f6f1ca34775f159bbe94deb82548\\\"\"" pod="openstack-operators/placement-operator-controller-manager-7d8bb7f44c-5tm2n" podUID="92f8bc55-a8b1-41dd-9490-12c2280106ed" Oct 03 13:47:03 crc kubenswrapper[4861]: I1003 13:47:03.169578 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-5fbf469cd7-8dttf" event={"ID":"567dc82d-835f-4cf9-805d-a3d65c82b823","Type":"ContainerStarted","Data":"53d857da2c4e530269fde68cf55c025888068e668ad4a281dace52efd1475779"} Oct 03 13:47:03 crc kubenswrapper[4861]: I1003 13:47:03.172826 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-5db5cf686f-5tfpv" event={"ID":"682b0ab4-202c-4455-872c-715e9e6c4ee1","Type":"ContainerStarted","Data":"86043972b32d27f307457e3a1723cd4988c41d587608f540d52420f86d86e7ce"} Oct 03 13:47:03 crc kubenswrapper[4861]: I1003 13:47:03.172868 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-5db5cf686f-5tfpv" event={"ID":"682b0ab4-202c-4455-872c-715e9e6c4ee1","Type":"ContainerStarted","Data":"f2fc0a315ef3a269b8e46f3176fd9645d41708b9226c62ee4f38f8e7686deb76"} Oct 03 13:47:03 crc kubenswrapper[4861]: I1003 13:47:03.184419 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6f64c4d678f4mvp" event={"ID":"274f0db6-b0be-41da-a6ca-47160736f8e8","Type":"ContainerStarted","Data":"d1307510ffb7a1f7f0796d386f95e90506417c26f09fa86015fc9410bac0e8d6"} Oct 03 13:47:03 crc kubenswrapper[4861]: I1003 13:47:03.185478 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-688db7b6c7-xgq77" event={"ID":"5f7b92ed-6113-4c12-a8ec-25589c15dd32","Type":"ContainerStarted","Data":"a798d2bb609404f17b89123abd47b1acb00cc496fc65c1c89cfc145dce36efbe"} Oct 03 13:47:03 crc kubenswrapper[4861]: E1003 13:47:03.186462 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image 
\\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:8f5eee2eb7b77432ef1a88ed693ff981514359dfc808581f393bcef252de5cfa\\\"\"" pod="openstack-operators/telemetry-operator-controller-manager-5db5cf686f-5tfpv" podUID="682b0ab4-202c-4455-872c-715e9e6c4ee1" Oct 03 13:47:03 crc kubenswrapper[4861]: I1003 13:47:03.188318 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-8fd589ff7-d8tns" event={"ID":"ea16cb5e-459c-4ad2-9579-17bd88783158","Type":"ContainerStarted","Data":"91f605b268cbceae8c70bbb16b41b97be8eec965fec492d79ef5b6ca4b853212"} Oct 03 13:47:03 crc kubenswrapper[4861]: I1003 13:47:03.188361 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-8fd589ff7-d8tns" event={"ID":"ea16cb5e-459c-4ad2-9579-17bd88783158","Type":"ContainerStarted","Data":"b4f3d0f1bed25f13b4fbfbf3d6d75860544fad60bacc700c075f67e30ac8984d"} Oct 03 13:47:03 crc kubenswrapper[4861]: I1003 13:47:03.188373 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-8fd589ff7-d8tns" event={"ID":"ea16cb5e-459c-4ad2-9579-17bd88783158","Type":"ContainerStarted","Data":"ba10ce35ef6a71cc0f5f9d18bb31632d994d0affea31965fe5e812dbaeba4c6d"} Oct 03 13:47:03 crc kubenswrapper[4861]: I1003 13:47:03.188971 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-manager-8fd589ff7-d8tns" Oct 03 13:47:03 crc kubenswrapper[4861]: I1003 13:47:03.189831 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-fcd7d9895-mwpqn" event={"ID":"0ab6f3ab-52bf-404a-8102-195683e803e8","Type":"ContainerStarted","Data":"d65100451dae41581bc6824cecb5e2669a8f1a5f14a39c0a460964dc3d6b62c2"} Oct 03 13:47:03 crc kubenswrapper[4861]: I1003 13:47:03.189875 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-fcd7d9895-mwpqn" event={"ID":"0ab6f3ab-52bf-404a-8102-195683e803e8","Type":"ContainerStarted","Data":"011f1c8bb59df2a76dc9edd7715d48a752903735855bae01d831a7a97a9f2ab2"} Oct 03 13:47:03 crc kubenswrapper[4861]: E1003 13:47:03.191593 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:018151bd5ff830ec03c6b8e3d53cfb9456ca6e1e34793bdd4f7edd39a0146fa6\\\"\"" pod="openstack-operators/watcher-operator-controller-manager-fcd7d9895-mwpqn" podUID="0ab6f3ab-52bf-404a-8102-195683e803e8" Oct 03 13:47:03 crc kubenswrapper[4861]: I1003 13:47:03.202186 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-x75vq" event={"ID":"68cf4faf-6f3d-4dfe-9a86-22a803baf77c","Type":"ContainerStarted","Data":"8deebcd5ed5ef0e702f932044bdfc6d16f81ae141437f75471710b24017f7d60"} Oct 03 13:47:03 crc kubenswrapper[4861]: E1003 13:47:03.203784 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-x75vq" podUID="68cf4faf-6f3d-4dfe-9a86-22a803baf77c" Oct 03 13:47:03 crc 
kubenswrapper[4861]: I1003 13:47:03.208449 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-6859f9b676-9wwkj" event={"ID":"1fa5571a-b9b5-4395-aa7a-a32a670f8e92","Type":"ContainerStarted","Data":"af5be3d523ab6f5b47ae877f1e5e50bf5671a9d84fa5e0aa9c84c51856e6e9e4"} Oct 03 13:47:03 crc kubenswrapper[4861]: I1003 13:47:03.208493 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-6859f9b676-9wwkj" event={"ID":"1fa5571a-b9b5-4395-aa7a-a32a670f8e92","Type":"ContainerStarted","Data":"beae944c9f58d2e5f791ad70d3ad13b9a4b91b1fa40a0d1d385598e41ebc6553"} Oct 03 13:47:03 crc kubenswrapper[4861]: E1003 13:47:03.213790 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:637bb7b9ac308bc1e323391a3593b824f688090a856c83385814c17a571b1eed\\\"\"" pod="openstack-operators/swift-operator-controller-manager-6859f9b676-9wwkj" podUID="1fa5571a-b9b5-4395-aa7a-a32a670f8e92" Oct 03 13:47:03 crc kubenswrapper[4861]: I1003 13:47:03.340709 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-manager-8fd589ff7-d8tns" podStartSLOduration=3.340687348 podStartE2EDuration="3.340687348s" podCreationTimestamp="2025-10-03 13:47:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:47:03.322761026 +0000 UTC m=+937.320746073" watchObservedRunningTime="2025-10-03 13:47:03.340687348 +0000 UTC m=+937.338672395" Oct 03 13:47:04 crc kubenswrapper[4861]: E1003 13:47:04.233475 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/barbican-operator@sha256:91e11b31f0c969125d9883b1f765e7c99a62f639b11fab568dec82b38f8cfe74\\\"\"" pod="openstack-operators/barbican-operator-controller-manager-6c675fb79f-vjdcg" podUID="b7d80f0f-8c96-446e-a31e-90913d19d661" Oct 03 13:47:04 crc kubenswrapper[4861]: E1003 13:47:04.233697 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:8f5eee2eb7b77432ef1a88ed693ff981514359dfc808581f393bcef252de5cfa\\\"\"" pod="openstack-operators/telemetry-operator-controller-manager-5db5cf686f-5tfpv" podUID="682b0ab4-202c-4455-872c-715e9e6c4ee1" Oct 03 13:47:04 crc kubenswrapper[4861]: E1003 13:47:04.233735 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:018151bd5ff830ec03c6b8e3d53cfb9456ca6e1e34793bdd4f7edd39a0146fa6\\\"\"" pod="openstack-operators/watcher-operator-controller-manager-fcd7d9895-mwpqn" podUID="0ab6f3ab-52bf-404a-8102-195683e803e8" Oct 03 13:47:04 crc kubenswrapper[4861]: E1003 13:47:04.233760 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:637bb7b9ac308bc1e323391a3593b824f688090a856c83385814c17a571b1eed\\\"\"" 
pod="openstack-operators/swift-operator-controller-manager-6859f9b676-9wwkj" podUID="1fa5571a-b9b5-4395-aa7a-a32a670f8e92" Oct 03 13:47:04 crc kubenswrapper[4861]: E1003 13:47:04.233797 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-x75vq" podUID="68cf4faf-6f3d-4dfe-9a86-22a803baf77c" Oct 03 13:47:04 crc kubenswrapper[4861]: E1003 13:47:04.233828 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/placement-operator@sha256:725da67b3f9cf2758564e0111928cdd570c0f6f1ca34775f159bbe94deb82548\\\"\"" pod="openstack-operators/placement-operator-controller-manager-7d8bb7f44c-5tm2n" podUID="92f8bc55-a8b1-41dd-9490-12c2280106ed" Oct 03 13:47:11 crc kubenswrapper[4861]: I1003 13:47:11.123864 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-manager-8fd589ff7-d8tns" Oct 03 13:47:18 crc kubenswrapper[4861]: E1003 13:47:18.145827 4861 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/designate-operator@sha256:585796b996a5b6d7ad68f0cb420bf4f2ee38c9f16f194e3111c162ce91ea8a7b" Oct 03 13:47:18 crc kubenswrapper[4861]: E1003 13:47:18.148127 4861 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/designate-operator@sha256:585796b996a5b6d7ad68f0cb420bf4f2ee38c9f16f194e3111c162ce91ea8a7b,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-kw222,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod designate-operator-controller-manager-75dfd9b554-4nvdf_openstack-operators(24be9f7b-3c61-4434-8863-b3b5d9e5ee2a): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Oct 03 13:47:18 crc kubenswrapper[4861]: E1003 13:47:18.719984 4861 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/glance-operator@sha256:9fed055cd1f09627ef351e61c7e42227570193ccd5d33167a607c49b442a9d87"
Oct 03 13:47:18 crc kubenswrapper[4861]: E1003 13:47:18.720452 4861 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/glance-operator@sha256:9fed055cd1f09627ef351e61c7e42227570193ccd5d33167a607c49b442a9d87,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-qfmll,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod glance-operator-controller-manager-846dff85b5-qkqbk_openstack-operators(5a3fecb8-5f79-4f05-9169-7d5cf9072f2c): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Oct 03 13:47:19 crc kubenswrapper[4861]: E1003 13:47:19.334052 4861 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/keystone-operator@sha256:516f76ed86dd34225e6d0309451c7886bb81ff69032ba28125ae4d0cec54bce7"
Oct 03 13:47:19 crc kubenswrapper[4861]: E1003 13:47:19.334256 4861 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/keystone-operator@sha256:516f76ed86dd34225e6d0309451c7886bb81ff69032ba28125ae4d0cec54bce7,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-lsc6f,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod keystone-operator-controller-manager-7f55849f88-k4wkd_openstack-operators(685fbda0-ab44-4f3c-8614-d87234d29d2f): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Oct 03 13:47:19 crc kubenswrapper[4861]: E1003 13:47:19.864240 4861 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/octavia-operator@sha256:b7409dcf05c85eab205904d29d4276f8e927c772eba6363ecfa21ab10c4aaa01"
Oct 03 13:47:19 crc kubenswrapper[4861]: E1003 13:47:19.864689 4861 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/octavia-operator@sha256:b7409dcf05c85eab205904d29d4276f8e927c772eba6363ecfa21ab10c4aaa01,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-d2qnm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod octavia-operator-controller-manager-59d6cfdf45-qlrdc_openstack-operators(16695342-b32b-4303-b248-d616d2ab9676): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Oct 03 13:47:20 crc kubenswrapper[4861]: E1003 13:47:20.846789 4861 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/manila-operator@sha256:8fdb7ea8542adb2eca73f11bd78e6aebceed2ba7a1e9fdd149c75e0049d09ce0"
Oct 03 13:47:20 crc kubenswrapper[4861]: E1003 13:47:20.847081 4861 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/manila-operator@sha256:8fdb7ea8542adb2eca73f11bd78e6aebceed2ba7a1e9fdd149c75e0049d09ce0,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-nl67c,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod manila-operator-controller-manager-6fd6854b49-x5mwn_openstack-operators(c7d483ab-e555-49c8-93c9-8bb99928605a): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Oct 03 13:47:24 crc kubenswrapper[4861]: E1003 13:47:24.021743 4861 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/infra-operator@sha256:40fb1819b6639807b77ef79448d35f1e4bfc1838a09d4f380e9fa0f755352475"
Oct 03 13:47:24 crc kubenswrapper[4861]: E1003 13:47:24.022386 4861 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/infra-operator@sha256:40fb1819b6639807b77ef79448d35f1e4bfc1838a09d4f380e9fa0f755352475,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{600 -3} {} 600m DecimalSI},memory: {{2147483648 0} {} 2Gi BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{536870912 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:cert,ReadOnly:true,MountPath:/tmp/k8s-webhook-server/serving-certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-zqjst,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod infra-operator-controller-manager-5fbf469cd7-8dttf_openstack-operators(567dc82d-835f-4cf9-805d-a3d65c82b823): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Oct 03 13:47:24 crc kubenswrapper[4861]: E1003 13:47:24.405259 4861 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/nova-operator@sha256:a82409e6d6a5554aad95acfe6fa4784e33de19a963eb8b1da1a80a3e6cf1ab55"
Oct 03 13:47:24 crc kubenswrapper[4861]: E1003 13:47:24.405435 4861 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/nova-operator@sha256:a82409e6d6a5554aad95acfe6fa4784e33de19a963eb8b1da1a80a3e6cf1ab55,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-vcwq9,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod nova-operator-controller-manager-555c7456bd-v26h5_openstack-operators(fdf89986-9a3f-4f07-b0ad-fb900a6e2fd8): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Oct 03 13:47:24 crc kubenswrapper[4861]: E1003 13:47:24.819999 4861 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/openstack-baremetal-operator@sha256:f50229c8a33fd581bccbe5f34bbaf3936c1b454802e755c9b48b40b76a8239ee"
Oct 03 13:47:24 crc kubenswrapper[4861]: E1003 13:47:24.820514 4861 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/openstack-baremetal-operator@sha256:f50229c8a33fd581bccbe5f34bbaf3936c1b454802e755c9b48b40b76a8239ee,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:true,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/openstack-k8s-operators/openstack-baremetal-operator-agent:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_ANSIBLEEE_IMAGE_URL_DEFAULT,Value:quay.io/openstack-k8s-operators/openstack-ansibleee-runner:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AODH_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-aodh-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AODH_EVALUATOR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-aodh-evaluator:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AODH_LISTENER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-aodh-listener:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AODH_NOTIFIER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-aodh-notifier:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_APACHE_IMAGE_URL_DEFAULT,Value:registry.redhat.io/ubi9/httpd-24:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_BARBICAN_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_BARBICAN_KEYSTONE_LISTENER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-barbican-keystone-listener:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_BARBICAN_WORKER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-barbican-worker:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_CENTRAL_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ceilometer-central:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_COMPUTE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_IPMI_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ceilometer-ipmi:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_MYSQLD_EXPORTER_IMAGE_URL_DEFAULT,Value:quay.io/prometheus/mysqld-exporter:v0.15.1,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_NOTIFICATION_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ceilometer-notification:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_SGCORE_IMAGE_URL_DEFAULT,Value:quay.io/openstack-k8s-operators/sg-core:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CINDER_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CINDER_BACKUP_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cinder-backup:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CINDER_SCHEDULER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cinder-scheduler:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CINDER_VOLUME_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cinder-volume:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_BACKENDBIND9_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-backend-bind9:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_CENTRAL_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-central:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_MDNS_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-mdns:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_PRODUCER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-producer:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_UNBOUND_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-unbound:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_WORKER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-worker:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_FRR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-frr:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_ISCSID_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-iscsid:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_KEPLER_IMAGE_URL_DEFAULT,Value:quay.io/sustainable_computing_io/kepler:release-0.7.12,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_LOGROTATE_CROND_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cron:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_MULTIPATHD_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-multipathd:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NEUTRON_DHCP_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-dhcp-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NEUTRON_METADATA_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NEUTRON_OVN_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-ovn-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NEUTRON_SRIOV_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-sriov-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NODE_EXPORTER_IMAGE_URL_DEFAULT,Value:quay.io/prometheus/node-exporter:v1.5.0,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_OVN_BGP_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-bgp-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_PODMAN_EXPORTER_IMAGE_URL_DEFAULT,Value:quay.io/navidys/prometheus-podman-exporter:v1.10.1,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_GLANCE_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-glance-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_HEAT_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-heat-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_HEAT_CFNAPI_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-heat-api-cfn:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_HEAT_ENGINE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-heat-engine:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_HORIZON_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-horizon:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_INFRA_MEMCACHED_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-memcached:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_INFRA_REDIS_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-redis:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_CONDUCTOR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-conductor:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_INSPECTOR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-inspector:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_NEUTRON_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-neutron-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_PXE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-pxe:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_PYTHON_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/ironic-python-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_KEYSTONE_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-keystone:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_KSM_IMAGE_URL_DEFAULT,Value:registry.k8s.io/kube-state-metrics/kube-state-metrics:v2.15.0,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_MANILA_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-manila-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_MANILA_SCHEDULER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-manila-scheduler:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_MANILA_SHARE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-manila-share:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_MARIADB_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-mariadb:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NET_UTILS_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-netutils:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NEUTRON_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_COMPUTE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-compute:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_CONDUCTOR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-conductor:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_NOVNC_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-novncproxy:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_SCHEDULER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-scheduler:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-octavia-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_HEALTHMANAGER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-octavia-health-manager:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_HOUSEKEEPING_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-octavia-housekeeping:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_RSYSLOG_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-rsyslog:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_WORKER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-octavia-worker:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OPENSTACK_CLIENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-openstackclient:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OPENSTACK_MUST_GATHER_DEFAULT,Value:quay.io/openstack-k8s-operators/openstack-must-gather:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OPENSTACK_NETWORK_EXPORTER_IMAGE_URL_DEFAULT,Value:quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OS_CONTAINER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/edpm-hardened-uefi:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_CONTROLLER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_CONTROLLER_OVS_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-base:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_NB_DBCLUSTER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-nb-db-server:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_NORTHD_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-northd:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_SB_DBCLUSTER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-sb-db-server:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_PLACEMENT_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-placement-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_RABBITMQ_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_SWIFT_ACCOUNT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-swift-account:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_SWIFT_CONTAINER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-swift-container:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_SWIFT_OBJECT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-swift-object:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_SWIFT_PROXY_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-swift-proxy-server:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_TEST_TEMPEST_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_WATCHER_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-master-centos9/openstack-watcher-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_WATCHER_APPLIER_IMAGE_URL_DEFAULT,Value:quay.io/podified-master-centos9/openstack-watcher-applier:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_WATCHER_DECISION_ENGINE_IMAGE_URL_DEFAULT,Value:quay.io/podified-master-centos9/openstack-watcher-decision-engine:current-podified,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:cert,ReadOnly:true,MountPath:/tmp/k8s-webhook-server/serving-certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-r6ljk,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-baremetal-operator-controller-manager-6f64c4d678f4mvp_openstack-operators(274f0db6-b0be-41da-a6ca-47160736f8e8): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Oct 03 13:47:25 crc kubenswrapper[4861]: E1003 13:47:25.291913 4861 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/heat-operator@sha256:1bd15383c53b5772acd12c376bd29b9fde0c2e6cca2cd493492fb1eeb9c52ef5"
Oct 03 13:47:25 crc kubenswrapper[4861]: E1003 13:47:25.292095 4861 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/heat-operator@sha256:1bd15383c53b5772acd12c376bd29b9fde0c2e6cca2cd493492fb1eeb9c52ef5,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-sgnns,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod heat-operator-controller-manager-599898f689-ch9nw_openstack-operators(20ad49e2-5077-43e2-8671-58457cf10432): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Oct 03 13:47:25 crc kubenswrapper[4861]: E1003 13:47:25.614203 4861 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/horizon-operator@sha256:e9ff0784bffe5b9a6d1a77a1b8866dd26b8d0c54465707df1808f68caad93a95"
Oct 03 13:47:25 crc kubenswrapper[4861]: E1003 13:47:25.614473 4861 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/horizon-operator@sha256:e9ff0784bffe5b9a6d1a77a1b8866dd26b8d0c54465707df1808f68caad93a95,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-jwxmv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-operator-controller-manager-6769b867d9-qcfrn_openstack-operators(c3e0bf46-a854-4e79-a1d4-4cb5d9c5eaf1): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Oct 03 13:47:29 crc kubenswrapper[4861]: E1003 13:47:29.684292 4861 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/ironic-operator@sha256:38abe6135ccaa369bc831f7878a6dfdf9a5a993a882e1c42073ca43582766f12"
Oct 03 13:47:29 crc kubenswrapper[4861]: E1003 13:47:29.685116 4861 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/ironic-operator@sha256:38abe6135ccaa369bc831f7878a6dfdf9a5a993a882e1c42073ca43582766f12,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-qfcw4,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ironic-operator-controller-manager-84bc9db6cc-mlqw7_openstack-operators(cb92f20f-a3f0-42b3-ae87-11e0215c62fb): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Oct 03 13:47:30 crc kubenswrapper[4861]: E1003 13:47:30.099376 4861 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/ovn-operator@sha256:5c6ab93b78bd20eb7f1736751a59c1eb33fb06351339563dbefe49ccaaff6e94"
Oct 03 13:47:30 crc kubenswrapper[4861]: E1003 13:47:30.099585 4861 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/ovn-operator@sha256:5c6ab93b78bd20eb7f1736751a59c1eb33fb06351339563dbefe49ccaaff6e94,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-m8nqb,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovn-operator-controller-manager-688db7b6c7-xgq77_openstack-operators(5f7b92ed-6113-4c12-a8ec-25589c15dd32): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Oct 03 13:47:30 crc kubenswrapper[4861]: E1003 13:47:30.537589 4861 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/mariadb-operator@sha256:110b885fe640ffdd8536e7da2a613677a6777e3d902e2ff15fa4d5968fe06c54"
Oct 03 13:47:30 crc kubenswrapper[4861]: E1003 13:47:30.537793 4861 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/mariadb-operator@sha256:110b885fe640ffdd8536e7da2a613677a6777e3d902e2ff15fa4d5968fe06c54,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-h2qgc,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod mariadb-operator-controller-manager-5c468bf4d4-rx55k_openstack-operators(9bf321cf-e938-46ed-b8b9-01418f85de45): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Oct 03 13:47:31 crc kubenswrapper[4861]: E1003 13:47:31.761974 4861 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2"
Oct 03 13:47:31 crc kubenswrapper[4861]: E1003 13:47:31.762125 4861 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:operator,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2,Command:[/manager],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:9782,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-x69n8,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cluster-operator-manager-5f97d8c699-x75vq_openstack-operators(68cf4faf-6f3d-4dfe-9a86-22a803baf77c): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Oct 03 13:47:31 crc kubenswrapper[4861]: E1003 13:47:31.763247 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-x75vq" podUID="68cf4faf-6f3d-4dfe-9a86-22a803baf77c"
Oct 03 13:47:32 crc kubenswrapper[4861]: E1003 13:47:32.400920 4861 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/watcher-operator@sha256:018151bd5ff830ec03c6b8e3d53cfb9456ca6e1e34793bdd4f7edd39a0146fa6"
Oct 03 13:47:32 crc kubenswrapper[4861]: E1003 13:47:32.401074 4861 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/watcher-operator@sha256:018151bd5ff830ec03c6b8e3d53cfb9456ca6e1e34793bdd4f7edd39a0146fa6,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-mbnzg,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-fcd7d9895-mwpqn_openstack-operators(0ab6f3ab-52bf-404a-8102-195683e803e8): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Oct 03 13:47:32 crc kubenswrapper[4861]: E1003 13:47:32.402271 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/watcher-operator-controller-manager-fcd7d9895-mwpqn" podUID="0ab6f3ab-52bf-404a-8102-195683e803e8"
Oct 03 13:47:32 crc kubenswrapper[4861]: E1003 13:47:32.948109 4861 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/telemetry-operator@sha256:8f5eee2eb7b77432ef1a88ed693ff981514359dfc808581f393bcef252de5cfa"
Oct 03 13:47:32 crc kubenswrapper[4861]: E1003 13:47:32.948561 4861 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/telemetry-operator@sha256:8f5eee2eb7b77432ef1a88ed693ff981514359dfc808581f393bcef252de5cfa,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-nl5r4,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-5db5cf686f-5tfpv_openstack-operators(682b0ab4-202c-4455-872c-715e9e6c4ee1): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Oct 03 13:47:32 crc kubenswrapper[4861]: E1003 13:47:32.950313 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/telemetry-operator-controller-manager-5db5cf686f-5tfpv" podUID="682b0ab4-202c-4455-872c-715e9e6c4ee1"
Oct 03 13:47:33 crc kubenswrapper[4861]: E1003 13:47:33.317071 4861 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/swift-operator@sha256:637bb7b9ac308bc1e323391a3593b824f688090a856c83385814c17a571b1eed"
Oct 03 13:47:33 crc kubenswrapper[4861]: E1003 13:47:33.317296 4861 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/swift-operator@sha256:637bb7b9ac308bc1e323391a3593b824f688090a856c83385814c17a571b1eed,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-bhjxk,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod swift-operator-controller-manager-6859f9b676-9wwkj_openstack-operators(1fa5571a-b9b5-4395-aa7a-a32a670f8e92): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Oct 03 13:47:33 crc kubenswrapper[4861]: E1003 13:47:33.318530 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/swift-operator-controller-manager-6859f9b676-9wwkj" podUID="1fa5571a-b9b5-4395-aa7a-a32a670f8e92"
Oct 03 13:47:34 crc kubenswrapper[4861]: E1003 13:47:34.204871 4861 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/barbican-operator@sha256:91e11b31f0c969125d9883b1f765e7c99a62f639b11fab568dec82b38f8cfe74"
Oct 03 13:47:34 crc kubenswrapper[4861]: E1003 13:47:34.205115 4861 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/barbican-operator@sha256:91e11b31f0c969125d9883b1f765e7c99a62f639b11fab568dec82b38f8cfe74,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-5hz4p,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod barbican-operator-controller-manager-6c675fb79f-vjdcg_openstack-operators(b7d80f0f-8c96-446e-a31e-90913d19d661): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Oct 03 13:47:34 crc kubenswrapper[4861]: E1003 13:47:34.206350 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/barbican-operator-controller-manager-6c675fb79f-vjdcg" podUID="b7d80f0f-8c96-446e-a31e-90913d19d661"
Oct 03 13:47:34 crc kubenswrapper[4861]: E1003 13:47:34.748583 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/octavia-operator-controller-manager-59d6cfdf45-qlrdc" podUID="16695342-b32b-4303-b248-d616d2ab9676"
Oct 03 13:47:34 crc kubenswrapper[4861]: E1003 13:47:34.783162 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/designate-operator-controller-manager-75dfd9b554-4nvdf" podUID="24be9f7b-3c61-4434-8863-b3b5d9e5ee2a"
Oct 03 13:47:34 crc kubenswrapper[4861]: E1003 13:47:34.784161 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6f64c4d678f4mvp" podUID="274f0db6-b0be-41da-a6ca-47160736f8e8"
Oct 03 13:47:34 crc kubenswrapper[4861]: E1003 13:47:34.902257 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/manila-operator-controller-manager-6fd6854b49-x5mwn" podUID="c7d483ab-e555-49c8-93c9-8bb99928605a"
Oct 03 13:47:34 crc kubenswrapper[4861]: E1003 13:47:34.902257 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/infra-operator-controller-manager-5fbf469cd7-8dttf" podUID="567dc82d-835f-4cf9-805d-a3d65c82b823"
Oct 03 13:47:34 crc kubenswrapper[4861]: E1003 13:47:34.909145 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/keystone-operator-controller-manager-7f55849f88-k4wkd" podUID="685fbda0-ab44-4f3c-8614-d87234d29d2f"
Oct 03 13:47:34 crc kubenswrapper[4861]: E1003 13:47:34.936494 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/ironic-operator-controller-manager-84bc9db6cc-mlqw7" podUID="cb92f20f-a3f0-42b3-ae87-11e0215c62fb"
Oct 03 13:47:34 crc kubenswrapper[4861]: E1003 13:47:34.960100 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/glance-operator-controller-manager-846dff85b5-qkqbk" podUID="5a3fecb8-5f79-4f05-9169-7d5cf9072f2c"
Oct 03 13:47:34 crc kubenswrapper[4861]: E1003 13:47:34.963425 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/heat-operator-controller-manager-599898f689-ch9nw" podUID="20ad49e2-5077-43e2-8671-58457cf10432"
Oct 03 13:47:34 crc kubenswrapper[4861]: E1003 13:47:34.968407 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/nova-operator-controller-manager-555c7456bd-v26h5" podUID="fdf89986-9a3f-4f07-b0ad-fb900a6e2fd8"
Oct 03 13:47:34 crc kubenswrapper[4861]: E1003 13:47:34.968482 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/ovn-operator-controller-manager-688db7b6c7-xgq77" podUID="5f7b92ed-6113-4c12-a8ec-25589c15dd32"
Oct 03 13:47:34 crc kubenswrapper[4861]: E1003 
13:47:34.985760 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/mariadb-operator-controller-manager-5c468bf4d4-rx55k" podUID="9bf321cf-e938-46ed-b8b9-01418f85de45" Oct 03 13:47:35 crc kubenswrapper[4861]: E1003 13:47:35.003147 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/horizon-operator-controller-manager-6769b867d9-qcfrn" podUID="c3e0bf46-a854-4e79-a1d4-4cb5d9c5eaf1" Oct 03 13:47:35 crc kubenswrapper[4861]: I1003 13:47:35.423190 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-846dff85b5-qkqbk" event={"ID":"5a3fecb8-5f79-4f05-9169-7d5cf9072f2c","Type":"ContainerStarted","Data":"d1a36c695f645a1e12772f4032988aada6b513f2516753bcd19e298b6010c689"} Oct 03 13:47:35 crc kubenswrapper[4861]: I1003 13:47:35.424752 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-5c468bf4d4-rx55k" event={"ID":"9bf321cf-e938-46ed-b8b9-01418f85de45","Type":"ContainerStarted","Data":"86aacc4463cf95a9db3baaa4a2fbf49a4fe9664f48229e39a92a533982025ee4"} Oct 03 13:47:35 crc kubenswrapper[4861]: E1003 13:47:35.426372 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/mariadb-operator@sha256:110b885fe640ffdd8536e7da2a613677a6777e3d902e2ff15fa4d5968fe06c54\\\"\"" pod="openstack-operators/mariadb-operator-controller-manager-5c468bf4d4-rx55k" podUID="9bf321cf-e938-46ed-b8b9-01418f85de45" Oct 03 13:47:35 crc kubenswrapper[4861]: I1003 13:47:35.427140 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-79d68d6c85-sfz28" event={"ID":"a4bdc7e9-1988-4650-8f1c-2d5d8a71b4cc","Type":"ContainerStarted","Data":"a7a612f0fa8caa09ae41ab5df8ba1d23acaf7ac09250b020c1826f99f808140b"} Oct 03 13:47:35 crc kubenswrapper[4861]: I1003 13:47:35.428993 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-7d8bb7f44c-5tm2n" event={"ID":"92f8bc55-a8b1-41dd-9490-12c2280106ed","Type":"ContainerStarted","Data":"efa03fbf2cbf176080a41bb5bbbe848f25be425f590ad4b585a9952261b78fde"} Oct 03 13:47:35 crc kubenswrapper[4861]: I1003 13:47:35.429797 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/placement-operator-controller-manager-7d8bb7f44c-5tm2n" Oct 03 13:47:35 crc kubenswrapper[4861]: I1003 13:47:35.434029 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-6769b867d9-qcfrn" event={"ID":"c3e0bf46-a854-4e79-a1d4-4cb5d9c5eaf1","Type":"ContainerStarted","Data":"bb169a051aee3e357263687315d209808e7905f8859abadc518e067ae256de2b"} Oct 03 13:47:35 crc kubenswrapper[4861]: E1003 13:47:35.435732 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/horizon-operator@sha256:e9ff0784bffe5b9a6d1a77a1b8866dd26b8d0c54465707df1808f68caad93a95\\\"\"" 
pod="openstack-operators/horizon-operator-controller-manager-6769b867d9-qcfrn" podUID="c3e0bf46-a854-4e79-a1d4-4cb5d9c5eaf1" Oct 03 13:47:35 crc kubenswrapper[4861]: I1003 13:47:35.438187 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-5fbf469cd7-8dttf" event={"ID":"567dc82d-835f-4cf9-805d-a3d65c82b823","Type":"ContainerStarted","Data":"6c5fd2a69c169a93f72e5a4befe278258ead48a9d1b8a9ae4b85ef635590d060"} Oct 03 13:47:35 crc kubenswrapper[4861]: I1003 13:47:35.441817 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-688db7b6c7-xgq77" event={"ID":"5f7b92ed-6113-4c12-a8ec-25589c15dd32","Type":"ContainerStarted","Data":"d25ad8e46cadb1e05ff63143fbd26d275655d68332c279835aaa9895a3b6e3b2"} Oct 03 13:47:35 crc kubenswrapper[4861]: E1003 13:47:35.443762 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ovn-operator@sha256:5c6ab93b78bd20eb7f1736751a59c1eb33fb06351339563dbefe49ccaaff6e94\\\"\"" pod="openstack-operators/ovn-operator-controller-manager-688db7b6c7-xgq77" podUID="5f7b92ed-6113-4c12-a8ec-25589c15dd32" Oct 03 13:47:35 crc kubenswrapper[4861]: I1003 13:47:35.444482 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-59d6cfdf45-qlrdc" event={"ID":"16695342-b32b-4303-b248-d616d2ab9676","Type":"ContainerStarted","Data":"29ca2573d9675b574f2b0783330db37f9f2a908184d86dcc8315be68ee3a7437"} Oct 03 13:47:35 crc kubenswrapper[4861]: I1003 13:47:35.446890 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6f64c4d678f4mvp" event={"ID":"274f0db6-b0be-41da-a6ca-47160736f8e8","Type":"ContainerStarted","Data":"31a5dd790bbf51111c6d44184ab5f2c9fd78921c4605b390662c0d16e55b9806"} Oct 03 13:47:35 crc kubenswrapper[4861]: I1003 13:47:35.450125 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-599898f689-ch9nw" event={"ID":"20ad49e2-5077-43e2-8671-58457cf10432","Type":"ContainerStarted","Data":"ea2bb61c3475bc472273c8eda4b8e9bfcc9166b0e9aab726ad8e9c396bb2911c"} Oct 03 13:47:35 crc kubenswrapper[4861]: I1003 13:47:35.457095 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-75dfd9b554-4nvdf" event={"ID":"24be9f7b-3c61-4434-8863-b3b5d9e5ee2a","Type":"ContainerStarted","Data":"4ff587ada5215ed0e53edfc81b3bbac8e45d9e360aa728154ffbf2dd438397f8"} Oct 03 13:47:35 crc kubenswrapper[4861]: I1003 13:47:35.474196 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5cd5cb47d7-stsgz" event={"ID":"896120d6-4995-4fd4-a238-4b34c6128326","Type":"ContainerStarted","Data":"52a7ee802f396616e743bb826709a82d1aa04cf11def4ce5fd03a6c11eaae2fd"} Oct 03 13:47:35 crc kubenswrapper[4861]: I1003 13:47:35.474269 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5cd5cb47d7-stsgz" event={"ID":"896120d6-4995-4fd4-a238-4b34c6128326","Type":"ContainerStarted","Data":"73ffb3fcb71b852f357c14b62e288b60d4da99f287b099769580a2c2b645f9ed"} Oct 03 13:47:35 crc kubenswrapper[4861]: I1003 13:47:35.481751 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/nova-operator-controller-manager-555c7456bd-v26h5" event={"ID":"fdf89986-9a3f-4f07-b0ad-fb900a6e2fd8","Type":"ContainerStarted","Data":"cb66e420631d6eabf27cbb0ca311cfdb71a801c37e442eac6bbfe41f53681ba8"} Oct 03 13:47:35 crc kubenswrapper[4861]: I1003 13:47:35.515469 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-6574bf987d-6qwsn" event={"ID":"57962592-42ae-47a9-a651-7c5d0e3ffad5","Type":"ContainerStarted","Data":"7071f3086b52452c48102dca05f5e88023db7a70ee0e13f00b1183891948e66b"} Oct 03 13:47:35 crc kubenswrapper[4861]: I1003 13:47:35.518588 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7f55849f88-k4wkd" event={"ID":"685fbda0-ab44-4f3c-8614-d87234d29d2f","Type":"ContainerStarted","Data":"99ba173cc3591c9aa66a972ac3a588434ae3dc246eeb4bf140016c6141115233"} Oct 03 13:47:35 crc kubenswrapper[4861]: I1003 13:47:35.558480 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-84bc9db6cc-mlqw7" event={"ID":"cb92f20f-a3f0-42b3-ae87-11e0215c62fb","Type":"ContainerStarted","Data":"cfb25b31c70c512f5131a2fabd3d9f4a00e59803d45dc04c9043e97932fe1449"} Oct 03 13:47:35 crc kubenswrapper[4861]: E1003 13:47:35.565619 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ironic-operator@sha256:38abe6135ccaa369bc831f7878a6dfdf9a5a993a882e1c42073ca43582766f12\\\"\"" pod="openstack-operators/ironic-operator-controller-manager-84bc9db6cc-mlqw7" podUID="cb92f20f-a3f0-42b3-ae87-11e0215c62fb" Oct 03 13:47:35 crc kubenswrapper[4861]: I1003 13:47:35.575949 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-6fd6854b49-x5mwn" event={"ID":"c7d483ab-e555-49c8-93c9-8bb99928605a","Type":"ContainerStarted","Data":"ef11beca5b785cd2403ecf652a9094e164ef4f802531ba1c296363cfad098b20"} Oct 03 13:47:35 crc kubenswrapper[4861]: I1003 13:47:35.732062 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/placement-operator-controller-manager-7d8bb7f44c-5tm2n" podStartSLOduration=4.745516624 podStartE2EDuration="36.732045425s" podCreationTimestamp="2025-10-03 13:46:59 +0000 UTC" firstStartedPulling="2025-10-03 13:47:02.387363445 +0000 UTC m=+936.385348492" lastFinishedPulling="2025-10-03 13:47:34.373892246 +0000 UTC m=+968.371877293" observedRunningTime="2025-10-03 13:47:35.729371385 +0000 UTC m=+969.727356432" watchObservedRunningTime="2025-10-03 13:47:35.732045425 +0000 UTC m=+969.730030472" Oct 03 13:47:36 crc kubenswrapper[4861]: E1003 13:47:36.583833 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ovn-operator@sha256:5c6ab93b78bd20eb7f1736751a59c1eb33fb06351339563dbefe49ccaaff6e94\\\"\"" pod="openstack-operators/ovn-operator-controller-manager-688db7b6c7-xgq77" podUID="5f7b92ed-6113-4c12-a8ec-25589c15dd32" Oct 03 13:47:36 crc kubenswrapper[4861]: E1003 13:47:36.583951 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image 
\\\"quay.io/openstack-k8s-operators/ironic-operator@sha256:38abe6135ccaa369bc831f7878a6dfdf9a5a993a882e1c42073ca43582766f12\\\"\"" pod="openstack-operators/ironic-operator-controller-manager-84bc9db6cc-mlqw7" podUID="cb92f20f-a3f0-42b3-ae87-11e0215c62fb" Oct 03 13:47:36 crc kubenswrapper[4861]: E1003 13:47:36.584140 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/mariadb-operator@sha256:110b885fe640ffdd8536e7da2a613677a6777e3d902e2ff15fa4d5968fe06c54\\\"\"" pod="openstack-operators/mariadb-operator-controller-manager-5c468bf4d4-rx55k" podUID="9bf321cf-e938-46ed-b8b9-01418f85de45" Oct 03 13:47:37 crc kubenswrapper[4861]: I1003 13:47:37.588909 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-6574bf987d-6qwsn" event={"ID":"57962592-42ae-47a9-a651-7c5d0e3ffad5","Type":"ContainerStarted","Data":"de4b7b4b8fa6ec768ff67eacacb0f3dce4b36810430d84178b2aff949d4051d3"} Oct 03 13:47:37 crc kubenswrapper[4861]: I1003 13:47:37.590204 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-79d68d6c85-sfz28" event={"ID":"a4bdc7e9-1988-4650-8f1c-2d5d8a71b4cc","Type":"ContainerStarted","Data":"aeb8bffbdfbfb34e42915bdd3cc2106df87eca3c95fbd03a3fab6f161953373c"} Oct 03 13:47:37 crc kubenswrapper[4861]: I1003 13:47:37.590296 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/test-operator-controller-manager-5cd5cb47d7-stsgz" Oct 03 13:47:37 crc kubenswrapper[4861]: I1003 13:47:37.612863 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/test-operator-controller-manager-5cd5cb47d7-stsgz" podStartSLOduration=10.032970121 podStartE2EDuration="38.612841352s" podCreationTimestamp="2025-10-03 13:46:59 +0000 UTC" firstStartedPulling="2025-10-03 13:47:01.948980085 +0000 UTC m=+935.946965132" lastFinishedPulling="2025-10-03 13:47:30.528851316 +0000 UTC m=+964.526836363" observedRunningTime="2025-10-03 13:47:37.60934755 +0000 UTC m=+971.607332617" watchObservedRunningTime="2025-10-03 13:47:37.612841352 +0000 UTC m=+971.610826419" Oct 03 13:47:38 crc kubenswrapper[4861]: I1003 13:47:38.622540 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/neutron-operator-controller-manager-6574bf987d-6qwsn" podStartSLOduration=10.703181723 podStartE2EDuration="39.622512609s" podCreationTimestamp="2025-10-03 13:46:59 +0000 UTC" firstStartedPulling="2025-10-03 13:47:01.609582202 +0000 UTC m=+935.607567249" lastFinishedPulling="2025-10-03 13:47:30.528913088 +0000 UTC m=+964.526898135" observedRunningTime="2025-10-03 13:47:38.621146773 +0000 UTC m=+972.619131860" watchObservedRunningTime="2025-10-03 13:47:38.622512609 +0000 UTC m=+972.620497666" Oct 03 13:47:38 crc kubenswrapper[4861]: I1003 13:47:38.644566 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/cinder-operator-controller-manager-79d68d6c85-sfz28" podStartSLOduration=14.566123743 podStartE2EDuration="39.644543138s" podCreationTimestamp="2025-10-03 13:46:59 +0000 UTC" firstStartedPulling="2025-10-03 13:47:00.528463033 +0000 UTC m=+934.526448070" lastFinishedPulling="2025-10-03 13:47:25.606882428 +0000 UTC m=+959.604867465" observedRunningTime="2025-10-03 13:47:38.639014953 +0000 UTC m=+972.637000020" 
watchObservedRunningTime="2025-10-03 13:47:38.644543138 +0000 UTC m=+972.642528205" Oct 03 13:47:39 crc kubenswrapper[4861]: I1003 13:47:39.642710 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/cinder-operator-controller-manager-79d68d6c85-sfz28" Oct 03 13:47:39 crc kubenswrapper[4861]: I1003 13:47:39.645818 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/cinder-operator-controller-manager-79d68d6c85-sfz28" Oct 03 13:47:40 crc kubenswrapper[4861]: I1003 13:47:40.196442 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/neutron-operator-controller-manager-6574bf987d-6qwsn" Oct 03 13:47:40 crc kubenswrapper[4861]: I1003 13:47:40.218919 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/neutron-operator-controller-manager-6574bf987d-6qwsn" Oct 03 13:47:40 crc kubenswrapper[4861]: I1003 13:47:40.402882 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/placement-operator-controller-manager-7d8bb7f44c-5tm2n" Oct 03 13:47:40 crc kubenswrapper[4861]: I1003 13:47:40.609149 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-555c7456bd-v26h5" event={"ID":"fdf89986-9a3f-4f07-b0ad-fb900a6e2fd8","Type":"ContainerStarted","Data":"516e071f39c30b386df7326c43ffc4cff6f44b884b6c04f5a2051a06d96275cd"} Oct 03 13:47:40 crc kubenswrapper[4861]: I1003 13:47:40.609502 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-controller-manager-555c7456bd-v26h5" Oct 03 13:47:40 crc kubenswrapper[4861]: I1003 13:47:40.610914 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-846dff85b5-qkqbk" event={"ID":"5a3fecb8-5f79-4f05-9169-7d5cf9072f2c","Type":"ContainerStarted","Data":"6fe7ffa402d20e148565ed4c2ba55d3b12f10cb91584d521e806e20757e77474"} Oct 03 13:47:40 crc kubenswrapper[4861]: I1003 13:47:40.611573 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-846dff85b5-qkqbk" Oct 03 13:47:40 crc kubenswrapper[4861]: I1003 13:47:40.613389 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7f55849f88-k4wkd" event={"ID":"685fbda0-ab44-4f3c-8614-d87234d29d2f","Type":"ContainerStarted","Data":"d19b38c710340909183a611b5eb9ffcc14f3f940c5c3b14a4e17787f41ea045c"} Oct 03 13:47:40 crc kubenswrapper[4861]: I1003 13:47:40.613823 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-7f55849f88-k4wkd" Oct 03 13:47:40 crc kubenswrapper[4861]: I1003 13:47:40.615499 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-75dfd9b554-4nvdf" event={"ID":"24be9f7b-3c61-4434-8863-b3b5d9e5ee2a","Type":"ContainerStarted","Data":"3348ec2a52ed60c5d68c1d736fb234fa06213b2cee018d6b37a4f445fea9855c"} Oct 03 13:47:40 crc kubenswrapper[4861]: I1003 13:47:40.615950 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/designate-operator-controller-manager-75dfd9b554-4nvdf" Oct 03 13:47:40 crc kubenswrapper[4861]: I1003 13:47:40.617819 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/manila-operator-controller-manager-6fd6854b49-x5mwn" event={"ID":"c7d483ab-e555-49c8-93c9-8bb99928605a","Type":"ContainerStarted","Data":"28ab5ee6ead953d153989670a14547fc28033fe96e1cc65aac2efee3829ea27e"} Oct 03 13:47:40 crc kubenswrapper[4861]: I1003 13:47:40.618263 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/manila-operator-controller-manager-6fd6854b49-x5mwn" Oct 03 13:47:40 crc kubenswrapper[4861]: I1003 13:47:40.619896 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-6769b867d9-qcfrn" event={"ID":"c3e0bf46-a854-4e79-a1d4-4cb5d9c5eaf1","Type":"ContainerStarted","Data":"2583f47cde5aa9ebb5c266257158ab302c14428dcaabd544ea18194cd7c3d4dc"} Oct 03 13:47:40 crc kubenswrapper[4861]: I1003 13:47:40.620374 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-6769b867d9-qcfrn" Oct 03 13:47:40 crc kubenswrapper[4861]: I1003 13:47:40.621652 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6f64c4d678f4mvp" event={"ID":"274f0db6-b0be-41da-a6ca-47160736f8e8","Type":"ContainerStarted","Data":"9da53c29d1faf12dd2d57f342b47dafb59ae80df3ff0af686008341a8f73f24e"} Oct 03 13:47:40 crc kubenswrapper[4861]: I1003 13:47:40.622066 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6f64c4d678f4mvp" Oct 03 13:47:40 crc kubenswrapper[4861]: I1003 13:47:40.623950 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/test-operator-controller-manager-5cd5cb47d7-stsgz" Oct 03 13:47:40 crc kubenswrapper[4861]: I1003 13:47:40.625450 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-599898f689-ch9nw" event={"ID":"20ad49e2-5077-43e2-8671-58457cf10432","Type":"ContainerStarted","Data":"cad394be1c203b990eae226b92f8469a17d4ba3a6cea6c3d98f10b059514637e"} Oct 03 13:47:40 crc kubenswrapper[4861]: I1003 13:47:40.625816 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/heat-operator-controller-manager-599898f689-ch9nw" Oct 03 13:47:40 crc kubenswrapper[4861]: I1003 13:47:40.627772 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-5fbf469cd7-8dttf" event={"ID":"567dc82d-835f-4cf9-805d-a3d65c82b823","Type":"ContainerStarted","Data":"c41d00194e2c896ceab2be82b57e06507b3120f606e62f0f1f8e113f143d9c0c"} Oct 03 13:47:40 crc kubenswrapper[4861]: I1003 13:47:40.627916 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-5fbf469cd7-8dttf" Oct 03 13:47:40 crc kubenswrapper[4861]: I1003 13:47:40.629904 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-59d6cfdf45-qlrdc" event={"ID":"16695342-b32b-4303-b248-d616d2ab9676","Type":"ContainerStarted","Data":"ac83dfc89cddfb2147e3be21132796d43664a884e85bf6b1643ad4a939ce3a81"} Oct 03 13:47:40 crc kubenswrapper[4861]: I1003 13:47:40.630250 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/nova-operator-controller-manager-555c7456bd-v26h5" podStartSLOduration=3.853561401 podStartE2EDuration="41.630215204s" 
podCreationTimestamp="2025-10-03 13:46:59 +0000 UTC" firstStartedPulling="2025-10-03 13:47:01.77972559 +0000 UTC m=+935.777710637" lastFinishedPulling="2025-10-03 13:47:39.556379373 +0000 UTC m=+973.554364440" observedRunningTime="2025-10-03 13:47:40.627545953 +0000 UTC m=+974.625531000" watchObservedRunningTime="2025-10-03 13:47:40.630215204 +0000 UTC m=+974.628200251" Oct 03 13:47:40 crc kubenswrapper[4861]: I1003 13:47:40.653562 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/heat-operator-controller-manager-599898f689-ch9nw" podStartSLOduration=3.3492965630000002 podStartE2EDuration="41.653543883s" podCreationTimestamp="2025-10-03 13:46:59 +0000 UTC" firstStartedPulling="2025-10-03 13:47:01.253404326 +0000 UTC m=+935.251389373" lastFinishedPulling="2025-10-03 13:47:39.557651646 +0000 UTC m=+973.555636693" observedRunningTime="2025-10-03 13:47:40.6500208 +0000 UTC m=+974.648005857" watchObservedRunningTime="2025-10-03 13:47:40.653543883 +0000 UTC m=+974.651528930" Oct 03 13:47:40 crc kubenswrapper[4861]: I1003 13:47:40.715456 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/glance-operator-controller-manager-846dff85b5-qkqbk" podStartSLOduration=3.413487778 podStartE2EDuration="41.715433137s" podCreationTimestamp="2025-10-03 13:46:59 +0000 UTC" firstStartedPulling="2025-10-03 13:47:01.254387982 +0000 UTC m=+935.252373029" lastFinishedPulling="2025-10-03 13:47:39.556333341 +0000 UTC m=+973.554318388" observedRunningTime="2025-10-03 13:47:40.710766912 +0000 UTC m=+974.708751959" watchObservedRunningTime="2025-10-03 13:47:40.715433137 +0000 UTC m=+974.713418184" Oct 03 13:47:40 crc kubenswrapper[4861]: I1003 13:47:40.716801 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/manila-operator-controller-manager-6fd6854b49-x5mwn" podStartSLOduration=3.195214804 podStartE2EDuration="41.716790772s" podCreationTimestamp="2025-10-03 13:46:59 +0000 UTC" firstStartedPulling="2025-10-03 13:47:01.633996474 +0000 UTC m=+935.631981521" lastFinishedPulling="2025-10-03 13:47:40.155572442 +0000 UTC m=+974.153557489" observedRunningTime="2025-10-03 13:47:40.686462297 +0000 UTC m=+974.684447344" watchObservedRunningTime="2025-10-03 13:47:40.716790772 +0000 UTC m=+974.714775829" Oct 03 13:47:40 crc kubenswrapper[4861]: I1003 13:47:40.798701 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6f64c4d678f4mvp" podStartSLOduration=4.533152919 podStartE2EDuration="41.798678847s" podCreationTimestamp="2025-10-03 13:46:59 +0000 UTC" firstStartedPulling="2025-10-03 13:47:02.293289338 +0000 UTC m=+936.291274385" lastFinishedPulling="2025-10-03 13:47:39.558815266 +0000 UTC m=+973.556800313" observedRunningTime="2025-10-03 13:47:40.7916298 +0000 UTC m=+974.789614847" watchObservedRunningTime="2025-10-03 13:47:40.798678847 +0000 UTC m=+974.796663894" Oct 03 13:47:40 crc kubenswrapper[4861]: I1003 13:47:40.871597 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-7f55849f88-k4wkd" podStartSLOduration=3.569393057 podStartE2EDuration="41.871576272s" podCreationTimestamp="2025-10-03 13:46:59 +0000 UTC" firstStartedPulling="2025-10-03 13:47:01.254103485 +0000 UTC m=+935.252088532" lastFinishedPulling="2025-10-03 13:47:39.5562867 +0000 UTC m=+973.554271747" observedRunningTime="2025-10-03 13:47:40.867000581 
+0000 UTC m=+974.864985638" watchObservedRunningTime="2025-10-03 13:47:40.871576272 +0000 UTC m=+974.869561319" Oct 03 13:47:40 crc kubenswrapper[4861]: I1003 13:47:40.976502 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-controller-manager-6769b867d9-qcfrn" podStartSLOduration=3.9091190940000002 podStartE2EDuration="41.976482188s" podCreationTimestamp="2025-10-03 13:46:59 +0000 UTC" firstStartedPulling="2025-10-03 13:47:01.490629581 +0000 UTC m=+935.488614628" lastFinishedPulling="2025-10-03 13:47:39.557992655 +0000 UTC m=+973.555977722" observedRunningTime="2025-10-03 13:47:40.930038975 +0000 UTC m=+974.928024022" watchObservedRunningTime="2025-10-03 13:47:40.976482188 +0000 UTC m=+974.974467235" Oct 03 13:47:40 crc kubenswrapper[4861]: I1003 13:47:40.978529 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/designate-operator-controller-manager-75dfd9b554-4nvdf" podStartSLOduration=3.258273962 podStartE2EDuration="41.978522341s" podCreationTimestamp="2025-10-03 13:46:59 +0000 UTC" firstStartedPulling="2025-10-03 13:47:01.447333211 +0000 UTC m=+935.445318258" lastFinishedPulling="2025-10-03 13:47:40.16758159 +0000 UTC m=+974.165566637" observedRunningTime="2025-10-03 13:47:40.97393871 +0000 UTC m=+974.971923757" watchObservedRunningTime="2025-10-03 13:47:40.978522341 +0000 UTC m=+974.976507388" Oct 03 13:47:41 crc kubenswrapper[4861]: I1003 13:47:41.001059 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-5fbf469cd7-8dttf" podStartSLOduration=4.66993405 podStartE2EDuration="42.00103775s" podCreationTimestamp="2025-10-03 13:46:59 +0000 UTC" firstStartedPulling="2025-10-03 13:47:02.206718139 +0000 UTC m=+936.204703186" lastFinishedPulling="2025-10-03 13:47:39.537821839 +0000 UTC m=+973.535806886" observedRunningTime="2025-10-03 13:47:40.993576531 +0000 UTC m=+974.991561588" watchObservedRunningTime="2025-10-03 13:47:41.00103775 +0000 UTC m=+974.999022797" Oct 03 13:47:41 crc kubenswrapper[4861]: I1003 13:47:41.024378 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/octavia-operator-controller-manager-59d6cfdf45-qlrdc" podStartSLOduration=4.300015122 podStartE2EDuration="42.024354438s" podCreationTimestamp="2025-10-03 13:46:59 +0000 UTC" firstStartedPulling="2025-10-03 13:47:01.834021209 +0000 UTC m=+935.832006256" lastFinishedPulling="2025-10-03 13:47:39.558360525 +0000 UTC m=+973.556345572" observedRunningTime="2025-10-03 13:47:41.019044717 +0000 UTC m=+975.017029764" watchObservedRunningTime="2025-10-03 13:47:41.024354438 +0000 UTC m=+975.022339485" Oct 03 13:47:41 crc kubenswrapper[4861]: I1003 13:47:41.636347 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/octavia-operator-controller-manager-59d6cfdf45-qlrdc" Oct 03 13:47:43 crc kubenswrapper[4861]: E1003 13:47:43.683698 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:018151bd5ff830ec03c6b8e3d53cfb9456ca6e1e34793bdd4f7edd39a0146fa6\\\"\"" pod="openstack-operators/watcher-operator-controller-manager-fcd7d9895-mwpqn" podUID="0ab6f3ab-52bf-404a-8102-195683e803e8" Oct 03 13:47:43 crc kubenswrapper[4861]: E1003 13:47:43.683772 4861 pod_workers.go:1301] "Error syncing pod, skipping" 
err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:8f5eee2eb7b77432ef1a88ed693ff981514359dfc808581f393bcef252de5cfa\\\"\"" pod="openstack-operators/telemetry-operator-controller-manager-5db5cf686f-5tfpv" podUID="682b0ab4-202c-4455-872c-715e9e6c4ee1" Oct 03 13:47:44 crc kubenswrapper[4861]: E1003 13:47:44.682417 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-x75vq" podUID="68cf4faf-6f3d-4dfe-9a86-22a803baf77c" Oct 03 13:47:46 crc kubenswrapper[4861]: E1003 13:47:46.687629 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/barbican-operator@sha256:91e11b31f0c969125d9883b1f765e7c99a62f639b11fab568dec82b38f8cfe74\\\"\"" pod="openstack-operators/barbican-operator-controller-manager-6c675fb79f-vjdcg" podUID="b7d80f0f-8c96-446e-a31e-90913d19d661" Oct 03 13:47:47 crc kubenswrapper[4861]: I1003 13:47:47.683136 4861 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 03 13:47:47 crc kubenswrapper[4861]: E1003 13:47:47.683731 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:637bb7b9ac308bc1e323391a3593b824f688090a856c83385814c17a571b1eed\\\"\"" pod="openstack-operators/swift-operator-controller-manager-6859f9b676-9wwkj" podUID="1fa5571a-b9b5-4395-aa7a-a32a670f8e92" Oct 03 13:47:48 crc kubenswrapper[4861]: I1003 13:47:48.692845 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-688db7b6c7-xgq77" event={"ID":"5f7b92ed-6113-4c12-a8ec-25589c15dd32","Type":"ContainerStarted","Data":"5164600dbd62a01b75b55277605d2bba7e2e03db5254a028b5ff16d7f4de917f"} Oct 03 13:47:48 crc kubenswrapper[4861]: I1003 13:47:48.693145 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-5c468bf4d4-rx55k" event={"ID":"9bf321cf-e938-46ed-b8b9-01418f85de45","Type":"ContainerStarted","Data":"4824e6aa9558dc75053b4c8f8b7b72f46811eb3a028f3ef38b527b129e3a0bd0"} Oct 03 13:47:48 crc kubenswrapper[4861]: I1003 13:47:48.694318 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ovn-operator-controller-manager-688db7b6c7-xgq77" Oct 03 13:47:48 crc kubenswrapper[4861]: I1003 13:47:48.694349 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-5c468bf4d4-rx55k" Oct 03 13:47:48 crc kubenswrapper[4861]: I1003 13:47:48.722958 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-5c468bf4d4-rx55k" podStartSLOduration=3.311313925 podStartE2EDuration="49.722937803s" podCreationTimestamp="2025-10-03 13:46:59 +0000 UTC" firstStartedPulling="2025-10-03 13:47:01.81772256 +0000 UTC m=+935.815707607" lastFinishedPulling="2025-10-03 13:47:48.229346438 +0000 UTC 
m=+982.227331485" observedRunningTime="2025-10-03 13:47:48.717406346 +0000 UTC m=+982.715391423" watchObservedRunningTime="2025-10-03 13:47:48.722937803 +0000 UTC m=+982.720922880" Oct 03 13:47:48 crc kubenswrapper[4861]: I1003 13:47:48.743856 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ovn-operator-controller-manager-688db7b6c7-xgq77" podStartSLOduration=3.708967898 podStartE2EDuration="49.743836028s" podCreationTimestamp="2025-10-03 13:46:59 +0000 UTC" firstStartedPulling="2025-10-03 13:47:02.191837107 +0000 UTC m=+936.189822154" lastFinishedPulling="2025-10-03 13:47:48.226705237 +0000 UTC m=+982.224690284" observedRunningTime="2025-10-03 13:47:48.739554614 +0000 UTC m=+982.737539691" watchObservedRunningTime="2025-10-03 13:47:48.743836028 +0000 UTC m=+982.741821085" Oct 03 13:47:49 crc kubenswrapper[4861]: I1003 13:47:49.939829 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/designate-operator-controller-manager-75dfd9b554-4nvdf" Oct 03 13:47:49 crc kubenswrapper[4861]: I1003 13:47:49.941124 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-7f55849f88-k4wkd" Oct 03 13:47:50 crc kubenswrapper[4861]: I1003 13:47:50.006070 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/glance-operator-controller-manager-846dff85b5-qkqbk" Oct 03 13:47:50 crc kubenswrapper[4861]: I1003 13:47:50.032941 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/heat-operator-controller-manager-599898f689-ch9nw" Oct 03 13:47:50 crc kubenswrapper[4861]: I1003 13:47:50.052584 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-6769b867d9-qcfrn" Oct 03 13:47:50 crc kubenswrapper[4861]: I1003 13:47:50.052729 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/manila-operator-controller-manager-6fd6854b49-x5mwn" Oct 03 13:47:50 crc kubenswrapper[4861]: I1003 13:47:50.206399 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/nova-operator-controller-manager-555c7456bd-v26h5" Oct 03 13:47:50 crc kubenswrapper[4861]: I1003 13:47:50.297920 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/octavia-operator-controller-manager-59d6cfdf45-qlrdc" Oct 03 13:47:50 crc kubenswrapper[4861]: I1003 13:47:50.716710 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-84bc9db6cc-mlqw7" event={"ID":"cb92f20f-a3f0-42b3-ae87-11e0215c62fb","Type":"ContainerStarted","Data":"96b1d1ae6f3cb8fbb8dc9e434ec84eb50e1881448d130576e83cb62cd01372a5"} Oct 03 13:47:50 crc kubenswrapper[4861]: I1003 13:47:50.718197 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ironic-operator-controller-manager-84bc9db6cc-mlqw7" Oct 03 13:47:50 crc kubenswrapper[4861]: I1003 13:47:50.745666 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ironic-operator-controller-manager-84bc9db6cc-mlqw7" podStartSLOduration=3.17442776 podStartE2EDuration="51.745632977s" podCreationTimestamp="2025-10-03 13:46:59 +0000 UTC" firstStartedPulling="2025-10-03 13:47:01.68779501 +0000 UTC m=+935.685780057" 
lastFinishedPulling="2025-10-03 13:47:50.259000237 +0000 UTC m=+984.256985274" observedRunningTime="2025-10-03 13:47:50.738317673 +0000 UTC m=+984.736302730" watchObservedRunningTime="2025-10-03 13:47:50.745632977 +0000 UTC m=+984.743618024" Oct 03 13:47:51 crc kubenswrapper[4861]: I1003 13:47:51.604689 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-5fbf469cd7-8dttf" Oct 03 13:47:51 crc kubenswrapper[4861]: I1003 13:47:51.852763 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6f64c4d678f4mvp" Oct 03 13:47:56 crc kubenswrapper[4861]: I1003 13:47:56.762530 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-x75vq" event={"ID":"68cf4faf-6f3d-4dfe-9a86-22a803baf77c","Type":"ContainerStarted","Data":"c921377f814197b4c36085ff1996f6386f9b6e66a7d872679ff3dd4cf4f2bc4a"} Oct 03 13:47:56 crc kubenswrapper[4861]: I1003 13:47:56.791363 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-x75vq" podStartSLOduration=2.853249179 podStartE2EDuration="56.791344597s" podCreationTimestamp="2025-10-03 13:47:00 +0000 UTC" firstStartedPulling="2025-10-03 13:47:02.363970048 +0000 UTC m=+936.361955085" lastFinishedPulling="2025-10-03 13:47:56.302065456 +0000 UTC m=+990.300050503" observedRunningTime="2025-10-03 13:47:56.784968928 +0000 UTC m=+990.782953975" watchObservedRunningTime="2025-10-03 13:47:56.791344597 +0000 UTC m=+990.789329644" Oct 03 13:47:57 crc kubenswrapper[4861]: I1003 13:47:57.770574 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-5db5cf686f-5tfpv" event={"ID":"682b0ab4-202c-4455-872c-715e9e6c4ee1","Type":"ContainerStarted","Data":"18090dfe44fd7135d320fa87d12a25aa1e87c0590a9dee95f8a2bd1c0e451569"} Oct 03 13:47:57 crc kubenswrapper[4861]: I1003 13:47:57.771767 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-5db5cf686f-5tfpv" Oct 03 13:47:57 crc kubenswrapper[4861]: I1003 13:47:57.791339 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/telemetry-operator-controller-manager-5db5cf686f-5tfpv" podStartSLOduration=3.927128342 podStartE2EDuration="58.791322747s" podCreationTimestamp="2025-10-03 13:46:59 +0000 UTC" firstStartedPulling="2025-10-03 13:47:02.364215236 +0000 UTC m=+936.362200273" lastFinishedPulling="2025-10-03 13:47:57.228409631 +0000 UTC m=+991.226394678" observedRunningTime="2025-10-03 13:47:57.785629386 +0000 UTC m=+991.783614443" watchObservedRunningTime="2025-10-03 13:47:57.791322747 +0000 UTC m=+991.789307794" Oct 03 13:47:59 crc kubenswrapper[4861]: I1003 13:47:59.790479 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-fcd7d9895-mwpqn" event={"ID":"0ab6f3ab-52bf-404a-8102-195683e803e8","Type":"ContainerStarted","Data":"a78335f273fb33285ba7e1e28c7a6d8f366616cfa186466d2de6453ccb88646d"} Oct 03 13:47:59 crc kubenswrapper[4861]: I1003 13:47:59.791209 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-controller-manager-fcd7d9895-mwpqn" Oct 03 13:47:59 crc kubenswrapper[4861]: I1003 13:47:59.819053 4861 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/watcher-operator-controller-manager-fcd7d9895-mwpqn" podStartSLOduration=3.919397174 podStartE2EDuration="1m0.819035936s" podCreationTimestamp="2025-10-03 13:46:59 +0000 UTC" firstStartedPulling="2025-10-03 13:47:02.372366459 +0000 UTC m=+936.370351506" lastFinishedPulling="2025-10-03 13:47:59.272005221 +0000 UTC m=+993.269990268" observedRunningTime="2025-10-03 13:47:59.815248855 +0000 UTC m=+993.813233892" watchObservedRunningTime="2025-10-03 13:47:59.819035936 +0000 UTC m=+993.817020983" Oct 03 13:48:00 crc kubenswrapper[4861]: I1003 13:48:00.118931 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ironic-operator-controller-manager-84bc9db6cc-mlqw7" Oct 03 13:48:00 crc kubenswrapper[4861]: I1003 13:48:00.132016 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-5c468bf4d4-rx55k" Oct 03 13:48:00 crc kubenswrapper[4861]: I1003 13:48:00.369936 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ovn-operator-controller-manager-688db7b6c7-xgq77" Oct 03 13:48:01 crc kubenswrapper[4861]: I1003 13:48:01.803320 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-6c675fb79f-vjdcg" event={"ID":"b7d80f0f-8c96-446e-a31e-90913d19d661","Type":"ContainerStarted","Data":"349b96d103f13682ba4e3610f3692e4575b64fcb49d567d62cd106ae5739d8f1"} Oct 03 13:48:01 crc kubenswrapper[4861]: I1003 13:48:01.803871 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-controller-manager-6c675fb79f-vjdcg" Oct 03 13:48:02 crc kubenswrapper[4861]: I1003 13:48:02.812538 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-6859f9b676-9wwkj" event={"ID":"1fa5571a-b9b5-4395-aa7a-a32a670f8e92","Type":"ContainerStarted","Data":"f5006754b76baea40fb3c551d4b2d789cba5cabda7d878a69ee44f881ac8a494"} Oct 03 13:48:02 crc kubenswrapper[4861]: I1003 13:48:02.813305 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-6859f9b676-9wwkj" Oct 03 13:48:02 crc kubenswrapper[4861]: I1003 13:48:02.832962 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-controller-manager-6859f9b676-9wwkj" podStartSLOduration=4.116456563 podStartE2EDuration="1m3.832938868s" podCreationTimestamp="2025-10-03 13:46:59 +0000 UTC" firstStartedPulling="2025-10-03 13:47:02.388894415 +0000 UTC m=+936.386879462" lastFinishedPulling="2025-10-03 13:48:02.10537672 +0000 UTC m=+996.103361767" observedRunningTime="2025-10-03 13:48:02.830115383 +0000 UTC m=+996.828100440" watchObservedRunningTime="2025-10-03 13:48:02.832938868 +0000 UTC m=+996.830923915" Oct 03 13:48:02 crc kubenswrapper[4861]: I1003 13:48:02.834491 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-controller-manager-6c675fb79f-vjdcg" podStartSLOduration=5.08831279 podStartE2EDuration="1m3.834482829s" podCreationTimestamp="2025-10-03 13:46:59 +0000 UTC" firstStartedPulling="2025-10-03 13:47:02.372595786 +0000 UTC m=+936.370580833" lastFinishedPulling="2025-10-03 13:48:01.118765815 +0000 UTC m=+995.116750872" observedRunningTime="2025-10-03 13:48:01.821955345 
+0000 UTC m=+995.819940392" watchObservedRunningTime="2025-10-03 13:48:02.834482829 +0000 UTC m=+996.832467876" Oct 03 13:48:10 crc kubenswrapper[4861]: I1003 13:48:10.496415 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-6859f9b676-9wwkj" Oct 03 13:48:10 crc kubenswrapper[4861]: I1003 13:48:10.539052 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-5db5cf686f-5tfpv" Oct 03 13:48:10 crc kubenswrapper[4861]: I1003 13:48:10.540664 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/barbican-operator-controller-manager-6c675fb79f-vjdcg" Oct 03 13:48:10 crc kubenswrapper[4861]: I1003 13:48:10.678131 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/watcher-operator-controller-manager-fcd7d9895-mwpqn" Oct 03 13:48:28 crc kubenswrapper[4861]: I1003 13:48:28.011098 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-ckdrf"] Oct 03 13:48:28 crc kubenswrapper[4861]: I1003 13:48:28.015691 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-ckdrf" Oct 03 13:48:28 crc kubenswrapper[4861]: I1003 13:48:28.019804 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt" Oct 03 13:48:28 crc kubenswrapper[4861]: I1003 13:48:28.020041 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns" Oct 03 13:48:28 crc kubenswrapper[4861]: I1003 13:48:28.020662 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt" Oct 03 13:48:28 crc kubenswrapper[4861]: I1003 13:48:28.020868 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-25q47" Oct 03 13:48:28 crc kubenswrapper[4861]: I1003 13:48:28.023595 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-ckdrf"] Oct 03 13:48:28 crc kubenswrapper[4861]: I1003 13:48:28.086766 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-8lgmr"] Oct 03 13:48:28 crc kubenswrapper[4861]: I1003 13:48:28.088048 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-8lgmr" Oct 03 13:48:28 crc kubenswrapper[4861]: I1003 13:48:28.092591 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc" Oct 03 13:48:28 crc kubenswrapper[4861]: I1003 13:48:28.101916 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-8lgmr"] Oct 03 13:48:28 crc kubenswrapper[4861]: I1003 13:48:28.204630 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/131ad935-aaa5-4684-a11f-8591d23823d6-config\") pod \"dnsmasq-dns-78dd6ddcc-8lgmr\" (UID: \"131ad935-aaa5-4684-a11f-8591d23823d6\") " pod="openstack/dnsmasq-dns-78dd6ddcc-8lgmr" Oct 03 13:48:28 crc kubenswrapper[4861]: I1003 13:48:28.204700 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c5gv2\" (UniqueName: \"kubernetes.io/projected/131ad935-aaa5-4684-a11f-8591d23823d6-kube-api-access-c5gv2\") pod \"dnsmasq-dns-78dd6ddcc-8lgmr\" (UID: \"131ad935-aaa5-4684-a11f-8591d23823d6\") " pod="openstack/dnsmasq-dns-78dd6ddcc-8lgmr" Oct 03 13:48:28 crc kubenswrapper[4861]: I1003 13:48:28.204729 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2xvrz\" (UniqueName: \"kubernetes.io/projected/bac78dbe-eb86-4702-8fbf-2a9e037ab76f-kube-api-access-2xvrz\") pod \"dnsmasq-dns-675f4bcbfc-ckdrf\" (UID: \"bac78dbe-eb86-4702-8fbf-2a9e037ab76f\") " pod="openstack/dnsmasq-dns-675f4bcbfc-ckdrf" Oct 03 13:48:28 crc kubenswrapper[4861]: I1003 13:48:28.204760 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bac78dbe-eb86-4702-8fbf-2a9e037ab76f-config\") pod \"dnsmasq-dns-675f4bcbfc-ckdrf\" (UID: \"bac78dbe-eb86-4702-8fbf-2a9e037ab76f\") " pod="openstack/dnsmasq-dns-675f4bcbfc-ckdrf" Oct 03 13:48:28 crc kubenswrapper[4861]: I1003 13:48:28.204788 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/131ad935-aaa5-4684-a11f-8591d23823d6-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-8lgmr\" (UID: \"131ad935-aaa5-4684-a11f-8591d23823d6\") " pod="openstack/dnsmasq-dns-78dd6ddcc-8lgmr" Oct 03 13:48:28 crc kubenswrapper[4861]: I1003 13:48:28.306479 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/131ad935-aaa5-4684-a11f-8591d23823d6-config\") pod \"dnsmasq-dns-78dd6ddcc-8lgmr\" (UID: \"131ad935-aaa5-4684-a11f-8591d23823d6\") " pod="openstack/dnsmasq-dns-78dd6ddcc-8lgmr" Oct 03 13:48:28 crc kubenswrapper[4861]: I1003 13:48:28.306558 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c5gv2\" (UniqueName: \"kubernetes.io/projected/131ad935-aaa5-4684-a11f-8591d23823d6-kube-api-access-c5gv2\") pod \"dnsmasq-dns-78dd6ddcc-8lgmr\" (UID: \"131ad935-aaa5-4684-a11f-8591d23823d6\") " pod="openstack/dnsmasq-dns-78dd6ddcc-8lgmr" Oct 03 13:48:28 crc kubenswrapper[4861]: I1003 13:48:28.306587 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2xvrz\" (UniqueName: \"kubernetes.io/projected/bac78dbe-eb86-4702-8fbf-2a9e037ab76f-kube-api-access-2xvrz\") pod \"dnsmasq-dns-675f4bcbfc-ckdrf\" (UID: \"bac78dbe-eb86-4702-8fbf-2a9e037ab76f\") " 
pod="openstack/dnsmasq-dns-675f4bcbfc-ckdrf" Oct 03 13:48:28 crc kubenswrapper[4861]: I1003 13:48:28.306624 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bac78dbe-eb86-4702-8fbf-2a9e037ab76f-config\") pod \"dnsmasq-dns-675f4bcbfc-ckdrf\" (UID: \"bac78dbe-eb86-4702-8fbf-2a9e037ab76f\") " pod="openstack/dnsmasq-dns-675f4bcbfc-ckdrf" Oct 03 13:48:28 crc kubenswrapper[4861]: I1003 13:48:28.306657 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/131ad935-aaa5-4684-a11f-8591d23823d6-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-8lgmr\" (UID: \"131ad935-aaa5-4684-a11f-8591d23823d6\") " pod="openstack/dnsmasq-dns-78dd6ddcc-8lgmr" Oct 03 13:48:28 crc kubenswrapper[4861]: I1003 13:48:28.307718 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/131ad935-aaa5-4684-a11f-8591d23823d6-config\") pod \"dnsmasq-dns-78dd6ddcc-8lgmr\" (UID: \"131ad935-aaa5-4684-a11f-8591d23823d6\") " pod="openstack/dnsmasq-dns-78dd6ddcc-8lgmr" Oct 03 13:48:28 crc kubenswrapper[4861]: I1003 13:48:28.307730 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bac78dbe-eb86-4702-8fbf-2a9e037ab76f-config\") pod \"dnsmasq-dns-675f4bcbfc-ckdrf\" (UID: \"bac78dbe-eb86-4702-8fbf-2a9e037ab76f\") " pod="openstack/dnsmasq-dns-675f4bcbfc-ckdrf" Oct 03 13:48:28 crc kubenswrapper[4861]: I1003 13:48:28.308147 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/131ad935-aaa5-4684-a11f-8591d23823d6-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-8lgmr\" (UID: \"131ad935-aaa5-4684-a11f-8591d23823d6\") " pod="openstack/dnsmasq-dns-78dd6ddcc-8lgmr" Oct 03 13:48:28 crc kubenswrapper[4861]: I1003 13:48:28.326779 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c5gv2\" (UniqueName: \"kubernetes.io/projected/131ad935-aaa5-4684-a11f-8591d23823d6-kube-api-access-c5gv2\") pod \"dnsmasq-dns-78dd6ddcc-8lgmr\" (UID: \"131ad935-aaa5-4684-a11f-8591d23823d6\") " pod="openstack/dnsmasq-dns-78dd6ddcc-8lgmr" Oct 03 13:48:28 crc kubenswrapper[4861]: I1003 13:48:28.327994 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2xvrz\" (UniqueName: \"kubernetes.io/projected/bac78dbe-eb86-4702-8fbf-2a9e037ab76f-kube-api-access-2xvrz\") pod \"dnsmasq-dns-675f4bcbfc-ckdrf\" (UID: \"bac78dbe-eb86-4702-8fbf-2a9e037ab76f\") " pod="openstack/dnsmasq-dns-675f4bcbfc-ckdrf" Oct 03 13:48:28 crc kubenswrapper[4861]: I1003 13:48:28.335840 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-ckdrf" Oct 03 13:48:28 crc kubenswrapper[4861]: I1003 13:48:28.402497 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-8lgmr" Oct 03 13:48:28 crc kubenswrapper[4861]: I1003 13:48:28.753745 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-8lgmr"] Oct 03 13:48:28 crc kubenswrapper[4861]: I1003 13:48:28.811760 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-ckdrf"] Oct 03 13:48:28 crc kubenswrapper[4861]: I1003 13:48:28.976315 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-ckdrf" event={"ID":"bac78dbe-eb86-4702-8fbf-2a9e037ab76f","Type":"ContainerStarted","Data":"7002f3f719ce272b7cae8b3d4ab115d740273f343340a68a4e1e10bb28b32632"} Oct 03 13:48:28 crc kubenswrapper[4861]: I1003 13:48:28.977521 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-8lgmr" event={"ID":"131ad935-aaa5-4684-a11f-8591d23823d6","Type":"ContainerStarted","Data":"8b756b017a8474c3b038cafe80845a8bae60511a915aa7e26b75fed99b9420d4"} Oct 03 13:48:30 crc kubenswrapper[4861]: I1003 13:48:30.707968 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-ckdrf"] Oct 03 13:48:30 crc kubenswrapper[4861]: I1003 13:48:30.780203 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5ccc8479f9-dzkq2"] Oct 03 13:48:30 crc kubenswrapper[4861]: I1003 13:48:30.784453 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5ccc8479f9-dzkq2" Oct 03 13:48:30 crc kubenswrapper[4861]: I1003 13:48:30.807657 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5ccc8479f9-dzkq2"] Oct 03 13:48:30 crc kubenswrapper[4861]: I1003 13:48:30.975311 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bc8b9936-3237-4bdf-99b5-94c8154217dc-config\") pod \"dnsmasq-dns-5ccc8479f9-dzkq2\" (UID: \"bc8b9936-3237-4bdf-99b5-94c8154217dc\") " pod="openstack/dnsmasq-dns-5ccc8479f9-dzkq2" Oct 03 13:48:30 crc kubenswrapper[4861]: I1003 13:48:30.975369 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bc8b9936-3237-4bdf-99b5-94c8154217dc-dns-svc\") pod \"dnsmasq-dns-5ccc8479f9-dzkq2\" (UID: \"bc8b9936-3237-4bdf-99b5-94c8154217dc\") " pod="openstack/dnsmasq-dns-5ccc8479f9-dzkq2" Oct 03 13:48:30 crc kubenswrapper[4861]: I1003 13:48:30.975446 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2npqg\" (UniqueName: \"kubernetes.io/projected/bc8b9936-3237-4bdf-99b5-94c8154217dc-kube-api-access-2npqg\") pod \"dnsmasq-dns-5ccc8479f9-dzkq2\" (UID: \"bc8b9936-3237-4bdf-99b5-94c8154217dc\") " pod="openstack/dnsmasq-dns-5ccc8479f9-dzkq2" Oct 03 13:48:31 crc kubenswrapper[4861]: I1003 13:48:31.083148 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bc8b9936-3237-4bdf-99b5-94c8154217dc-config\") pod \"dnsmasq-dns-5ccc8479f9-dzkq2\" (UID: \"bc8b9936-3237-4bdf-99b5-94c8154217dc\") " pod="openstack/dnsmasq-dns-5ccc8479f9-dzkq2" Oct 03 13:48:31 crc kubenswrapper[4861]: I1003 13:48:31.083202 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bc8b9936-3237-4bdf-99b5-94c8154217dc-dns-svc\") pod 
\"dnsmasq-dns-5ccc8479f9-dzkq2\" (UID: \"bc8b9936-3237-4bdf-99b5-94c8154217dc\") " pod="openstack/dnsmasq-dns-5ccc8479f9-dzkq2" Oct 03 13:48:31 crc kubenswrapper[4861]: I1003 13:48:31.083276 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2npqg\" (UniqueName: \"kubernetes.io/projected/bc8b9936-3237-4bdf-99b5-94c8154217dc-kube-api-access-2npqg\") pod \"dnsmasq-dns-5ccc8479f9-dzkq2\" (UID: \"bc8b9936-3237-4bdf-99b5-94c8154217dc\") " pod="openstack/dnsmasq-dns-5ccc8479f9-dzkq2" Oct 03 13:48:31 crc kubenswrapper[4861]: I1003 13:48:31.084587 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bc8b9936-3237-4bdf-99b5-94c8154217dc-config\") pod \"dnsmasq-dns-5ccc8479f9-dzkq2\" (UID: \"bc8b9936-3237-4bdf-99b5-94c8154217dc\") " pod="openstack/dnsmasq-dns-5ccc8479f9-dzkq2" Oct 03 13:48:31 crc kubenswrapper[4861]: I1003 13:48:31.085783 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bc8b9936-3237-4bdf-99b5-94c8154217dc-dns-svc\") pod \"dnsmasq-dns-5ccc8479f9-dzkq2\" (UID: \"bc8b9936-3237-4bdf-99b5-94c8154217dc\") " pod="openstack/dnsmasq-dns-5ccc8479f9-dzkq2" Oct 03 13:48:31 crc kubenswrapper[4861]: I1003 13:48:31.120514 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2npqg\" (UniqueName: \"kubernetes.io/projected/bc8b9936-3237-4bdf-99b5-94c8154217dc-kube-api-access-2npqg\") pod \"dnsmasq-dns-5ccc8479f9-dzkq2\" (UID: \"bc8b9936-3237-4bdf-99b5-94c8154217dc\") " pod="openstack/dnsmasq-dns-5ccc8479f9-dzkq2" Oct 03 13:48:31 crc kubenswrapper[4861]: I1003 13:48:31.256957 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-8lgmr"] Oct 03 13:48:31 crc kubenswrapper[4861]: I1003 13:48:31.297036 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-2ntbd"] Oct 03 13:48:31 crc kubenswrapper[4861]: I1003 13:48:31.305712 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-2ntbd" Oct 03 13:48:31 crc kubenswrapper[4861]: I1003 13:48:31.319501 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-2ntbd"] Oct 03 13:48:31 crc kubenswrapper[4861]: I1003 13:48:31.413756 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5ccc8479f9-dzkq2" Oct 03 13:48:31 crc kubenswrapper[4861]: I1003 13:48:31.489221 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/23e02e1d-0ff1-4ce2-a299-5802ec3177cd-config\") pod \"dnsmasq-dns-57d769cc4f-2ntbd\" (UID: \"23e02e1d-0ff1-4ce2-a299-5802ec3177cd\") " pod="openstack/dnsmasq-dns-57d769cc4f-2ntbd" Oct 03 13:48:31 crc kubenswrapper[4861]: I1003 13:48:31.489307 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wgzjh\" (UniqueName: \"kubernetes.io/projected/23e02e1d-0ff1-4ce2-a299-5802ec3177cd-kube-api-access-wgzjh\") pod \"dnsmasq-dns-57d769cc4f-2ntbd\" (UID: \"23e02e1d-0ff1-4ce2-a299-5802ec3177cd\") " pod="openstack/dnsmasq-dns-57d769cc4f-2ntbd" Oct 03 13:48:31 crc kubenswrapper[4861]: I1003 13:48:31.489386 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/23e02e1d-0ff1-4ce2-a299-5802ec3177cd-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-2ntbd\" (UID: \"23e02e1d-0ff1-4ce2-a299-5802ec3177cd\") " pod="openstack/dnsmasq-dns-57d769cc4f-2ntbd" Oct 03 13:48:31 crc kubenswrapper[4861]: I1003 13:48:31.590636 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/23e02e1d-0ff1-4ce2-a299-5802ec3177cd-config\") pod \"dnsmasq-dns-57d769cc4f-2ntbd\" (UID: \"23e02e1d-0ff1-4ce2-a299-5802ec3177cd\") " pod="openstack/dnsmasq-dns-57d769cc4f-2ntbd" Oct 03 13:48:31 crc kubenswrapper[4861]: I1003 13:48:31.590729 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wgzjh\" (UniqueName: \"kubernetes.io/projected/23e02e1d-0ff1-4ce2-a299-5802ec3177cd-kube-api-access-wgzjh\") pod \"dnsmasq-dns-57d769cc4f-2ntbd\" (UID: \"23e02e1d-0ff1-4ce2-a299-5802ec3177cd\") " pod="openstack/dnsmasq-dns-57d769cc4f-2ntbd" Oct 03 13:48:31 crc kubenswrapper[4861]: I1003 13:48:31.590792 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/23e02e1d-0ff1-4ce2-a299-5802ec3177cd-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-2ntbd\" (UID: \"23e02e1d-0ff1-4ce2-a299-5802ec3177cd\") " pod="openstack/dnsmasq-dns-57d769cc4f-2ntbd" Oct 03 13:48:31 crc kubenswrapper[4861]: I1003 13:48:31.592141 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/23e02e1d-0ff1-4ce2-a299-5802ec3177cd-config\") pod \"dnsmasq-dns-57d769cc4f-2ntbd\" (UID: \"23e02e1d-0ff1-4ce2-a299-5802ec3177cd\") " pod="openstack/dnsmasq-dns-57d769cc4f-2ntbd" Oct 03 13:48:31 crc kubenswrapper[4861]: I1003 13:48:31.592219 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/23e02e1d-0ff1-4ce2-a299-5802ec3177cd-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-2ntbd\" (UID: \"23e02e1d-0ff1-4ce2-a299-5802ec3177cd\") " pod="openstack/dnsmasq-dns-57d769cc4f-2ntbd" Oct 03 13:48:31 crc kubenswrapper[4861]: I1003 13:48:31.618592 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wgzjh\" (UniqueName: \"kubernetes.io/projected/23e02e1d-0ff1-4ce2-a299-5802ec3177cd-kube-api-access-wgzjh\") pod \"dnsmasq-dns-57d769cc4f-2ntbd\" (UID: \"23e02e1d-0ff1-4ce2-a299-5802ec3177cd\") " 
pod="openstack/dnsmasq-dns-57d769cc4f-2ntbd" Oct 03 13:48:31 crc kubenswrapper[4861]: I1003 13:48:31.638014 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-2ntbd" Oct 03 13:48:32 crc kubenswrapper[4861]: I1003 13:48:32.033032 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 03 13:48:32 crc kubenswrapper[4861]: I1003 13:48:32.035200 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:48:32 crc kubenswrapper[4861]: I1003 13:48:32.038786 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Oct 03 13:48:32 crc kubenswrapper[4861]: I1003 13:48:32.039295 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Oct 03 13:48:32 crc kubenswrapper[4861]: I1003 13:48:32.043259 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-8zws5" Oct 03 13:48:32 crc kubenswrapper[4861]: I1003 13:48:32.043474 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Oct 03 13:48:32 crc kubenswrapper[4861]: I1003 13:48:32.043866 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Oct 03 13:48:32 crc kubenswrapper[4861]: I1003 13:48:32.044095 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Oct 03 13:48:32 crc kubenswrapper[4861]: I1003 13:48:32.044253 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Oct 03 13:48:32 crc kubenswrapper[4861]: I1003 13:48:32.050706 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 03 13:48:32 crc kubenswrapper[4861]: I1003 13:48:32.099345 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5ccc8479f9-dzkq2"] Oct 03 13:48:32 crc kubenswrapper[4861]: W1003 13:48:32.105102 4861 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbc8b9936_3237_4bdf_99b5_94c8154217dc.slice/crio-29b2e3afe7f0330942ec8584c9fe0fc620d7b1daf252a262d3e0452d15b5b073 WatchSource:0}: Error finding container 29b2e3afe7f0330942ec8584c9fe0fc620d7b1daf252a262d3e0452d15b5b073: Status 404 returned error can't find the container with id 29b2e3afe7f0330942ec8584c9fe0fc620d7b1daf252a262d3e0452d15b5b073 Oct 03 13:48:32 crc kubenswrapper[4861]: I1003 13:48:32.132094 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d537dadb-d98c-4ac3-ae54-fc0a9397d7d7-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"d537dadb-d98c-4ac3-ae54-fc0a9397d7d7\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:48:32 crc kubenswrapper[4861]: I1003 13:48:32.132146 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"d537dadb-d98c-4ac3-ae54-fc0a9397d7d7\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:48:32 crc kubenswrapper[4861]: I1003 13:48:32.132171 4861 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/d537dadb-d98c-4ac3-ae54-fc0a9397d7d7-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"d537dadb-d98c-4ac3-ae54-fc0a9397d7d7\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:48:32 crc kubenswrapper[4861]: I1003 13:48:32.132194 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6fmjk\" (UniqueName: \"kubernetes.io/projected/d537dadb-d98c-4ac3-ae54-fc0a9397d7d7-kube-api-access-6fmjk\") pod \"rabbitmq-cell1-server-0\" (UID: \"d537dadb-d98c-4ac3-ae54-fc0a9397d7d7\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:48:32 crc kubenswrapper[4861]: I1003 13:48:32.132215 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/d537dadb-d98c-4ac3-ae54-fc0a9397d7d7-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"d537dadb-d98c-4ac3-ae54-fc0a9397d7d7\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:48:32 crc kubenswrapper[4861]: I1003 13:48:32.132247 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/d537dadb-d98c-4ac3-ae54-fc0a9397d7d7-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"d537dadb-d98c-4ac3-ae54-fc0a9397d7d7\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:48:32 crc kubenswrapper[4861]: I1003 13:48:32.132265 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/d537dadb-d98c-4ac3-ae54-fc0a9397d7d7-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"d537dadb-d98c-4ac3-ae54-fc0a9397d7d7\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:48:32 crc kubenswrapper[4861]: I1003 13:48:32.132287 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/d537dadb-d98c-4ac3-ae54-fc0a9397d7d7-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"d537dadb-d98c-4ac3-ae54-fc0a9397d7d7\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:48:32 crc kubenswrapper[4861]: I1003 13:48:32.132313 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/d537dadb-d98c-4ac3-ae54-fc0a9397d7d7-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"d537dadb-d98c-4ac3-ae54-fc0a9397d7d7\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:48:32 crc kubenswrapper[4861]: I1003 13:48:32.132342 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/d537dadb-d98c-4ac3-ae54-fc0a9397d7d7-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"d537dadb-d98c-4ac3-ae54-fc0a9397d7d7\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:48:32 crc kubenswrapper[4861]: I1003 13:48:32.132380 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/d537dadb-d98c-4ac3-ae54-fc0a9397d7d7-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"d537dadb-d98c-4ac3-ae54-fc0a9397d7d7\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:48:32 crc kubenswrapper[4861]: I1003 
13:48:32.235093 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/d537dadb-d98c-4ac3-ae54-fc0a9397d7d7-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"d537dadb-d98c-4ac3-ae54-fc0a9397d7d7\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:48:32 crc kubenswrapper[4861]: I1003 13:48:32.235197 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/d537dadb-d98c-4ac3-ae54-fc0a9397d7d7-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"d537dadb-d98c-4ac3-ae54-fc0a9397d7d7\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:48:32 crc kubenswrapper[4861]: I1003 13:48:32.235252 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/d537dadb-d98c-4ac3-ae54-fc0a9397d7d7-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"d537dadb-d98c-4ac3-ae54-fc0a9397d7d7\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:48:32 crc kubenswrapper[4861]: I1003 13:48:32.235303 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/d537dadb-d98c-4ac3-ae54-fc0a9397d7d7-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"d537dadb-d98c-4ac3-ae54-fc0a9397d7d7\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:48:32 crc kubenswrapper[4861]: I1003 13:48:32.235351 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d537dadb-d98c-4ac3-ae54-fc0a9397d7d7-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"d537dadb-d98c-4ac3-ae54-fc0a9397d7d7\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:48:32 crc kubenswrapper[4861]: I1003 13:48:32.235394 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"d537dadb-d98c-4ac3-ae54-fc0a9397d7d7\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:48:32 crc kubenswrapper[4861]: I1003 13:48:32.235465 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/d537dadb-d98c-4ac3-ae54-fc0a9397d7d7-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"d537dadb-d98c-4ac3-ae54-fc0a9397d7d7\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:48:32 crc kubenswrapper[4861]: I1003 13:48:32.235499 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6fmjk\" (UniqueName: \"kubernetes.io/projected/d537dadb-d98c-4ac3-ae54-fc0a9397d7d7-kube-api-access-6fmjk\") pod \"rabbitmq-cell1-server-0\" (UID: \"d537dadb-d98c-4ac3-ae54-fc0a9397d7d7\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:48:32 crc kubenswrapper[4861]: I1003 13:48:32.235525 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/d537dadb-d98c-4ac3-ae54-fc0a9397d7d7-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"d537dadb-d98c-4ac3-ae54-fc0a9397d7d7\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:48:32 crc kubenswrapper[4861]: I1003 13:48:32.235545 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: 
\"kubernetes.io/empty-dir/d537dadb-d98c-4ac3-ae54-fc0a9397d7d7-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"d537dadb-d98c-4ac3-ae54-fc0a9397d7d7\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:48:32 crc kubenswrapper[4861]: I1003 13:48:32.235564 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/d537dadb-d98c-4ac3-ae54-fc0a9397d7d7-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"d537dadb-d98c-4ac3-ae54-fc0a9397d7d7\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:48:32 crc kubenswrapper[4861]: I1003 13:48:32.235882 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/d537dadb-d98c-4ac3-ae54-fc0a9397d7d7-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"d537dadb-d98c-4ac3-ae54-fc0a9397d7d7\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:48:32 crc kubenswrapper[4861]: I1003 13:48:32.238122 4861 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"d537dadb-d98c-4ac3-ae54-fc0a9397d7d7\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:48:32 crc kubenswrapper[4861]: I1003 13:48:32.240136 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d537dadb-d98c-4ac3-ae54-fc0a9397d7d7-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"d537dadb-d98c-4ac3-ae54-fc0a9397d7d7\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:48:32 crc kubenswrapper[4861]: I1003 13:48:32.241101 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/d537dadb-d98c-4ac3-ae54-fc0a9397d7d7-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"d537dadb-d98c-4ac3-ae54-fc0a9397d7d7\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:48:32 crc kubenswrapper[4861]: I1003 13:48:32.241372 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/d537dadb-d98c-4ac3-ae54-fc0a9397d7d7-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"d537dadb-d98c-4ac3-ae54-fc0a9397d7d7\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:48:32 crc kubenswrapper[4861]: I1003 13:48:32.241499 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/d537dadb-d98c-4ac3-ae54-fc0a9397d7d7-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"d537dadb-d98c-4ac3-ae54-fc0a9397d7d7\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:48:32 crc kubenswrapper[4861]: I1003 13:48:32.248624 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/d537dadb-d98c-4ac3-ae54-fc0a9397d7d7-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"d537dadb-d98c-4ac3-ae54-fc0a9397d7d7\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:48:32 crc kubenswrapper[4861]: I1003 13:48:32.251749 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/d537dadb-d98c-4ac3-ae54-fc0a9397d7d7-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"d537dadb-d98c-4ac3-ae54-fc0a9397d7d7\") " 
pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:48:32 crc kubenswrapper[4861]: I1003 13:48:32.260399 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/d537dadb-d98c-4ac3-ae54-fc0a9397d7d7-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"d537dadb-d98c-4ac3-ae54-fc0a9397d7d7\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:48:32 crc kubenswrapper[4861]: I1003 13:48:32.264830 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6fmjk\" (UniqueName: \"kubernetes.io/projected/d537dadb-d98c-4ac3-ae54-fc0a9397d7d7-kube-api-access-6fmjk\") pod \"rabbitmq-cell1-server-0\" (UID: \"d537dadb-d98c-4ac3-ae54-fc0a9397d7d7\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:48:32 crc kubenswrapper[4861]: I1003 13:48:32.264880 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/d537dadb-d98c-4ac3-ae54-fc0a9397d7d7-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"d537dadb-d98c-4ac3-ae54-fc0a9397d7d7\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:48:32 crc kubenswrapper[4861]: I1003 13:48:32.266420 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-2ntbd"] Oct 03 13:48:32 crc kubenswrapper[4861]: I1003 13:48:32.276272 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"d537dadb-d98c-4ac3-ae54-fc0a9397d7d7\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:48:32 crc kubenswrapper[4861]: W1003 13:48:32.290009 4861 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod23e02e1d_0ff1_4ce2_a299_5802ec3177cd.slice/crio-4da56b6dc594357510dfa410227eddaedac1ece1a8db396d1698d6f54fa1e160 WatchSource:0}: Error finding container 4da56b6dc594357510dfa410227eddaedac1ece1a8db396d1698d6f54fa1e160: Status 404 returned error can't find the container with id 4da56b6dc594357510dfa410227eddaedac1ece1a8db396d1698d6f54fa1e160 Oct 03 13:48:32 crc kubenswrapper[4861]: I1003 13:48:32.369191 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:48:32 crc kubenswrapper[4861]: I1003 13:48:32.420164 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Oct 03 13:48:32 crc kubenswrapper[4861]: I1003 13:48:32.423097 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 03 13:48:32 crc kubenswrapper[4861]: I1003 13:48:32.430217 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Oct 03 13:48:32 crc kubenswrapper[4861]: I1003 13:48:32.432865 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Oct 03 13:48:32 crc kubenswrapper[4861]: I1003 13:48:32.433799 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Oct 03 13:48:32 crc kubenswrapper[4861]: I1003 13:48:32.434594 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-cw2gs" Oct 03 13:48:32 crc kubenswrapper[4861]: I1003 13:48:32.435693 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Oct 03 13:48:32 crc kubenswrapper[4861]: I1003 13:48:32.435793 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 03 13:48:32 crc kubenswrapper[4861]: I1003 13:48:32.435931 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Oct 03 13:48:32 crc kubenswrapper[4861]: I1003 13:48:32.437830 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Oct 03 13:48:32 crc kubenswrapper[4861]: I1003 13:48:32.539129 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j9jpr\" (UniqueName: \"kubernetes.io/projected/b9bf9ee1-8038-4578-b10d-390a82c11290-kube-api-access-j9jpr\") pod \"rabbitmq-server-0\" (UID: \"b9bf9ee1-8038-4578-b10d-390a82c11290\") " pod="openstack/rabbitmq-server-0" Oct 03 13:48:32 crc kubenswrapper[4861]: I1003 13:48:32.539188 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b9bf9ee1-8038-4578-b10d-390a82c11290-config-data\") pod \"rabbitmq-server-0\" (UID: \"b9bf9ee1-8038-4578-b10d-390a82c11290\") " pod="openstack/rabbitmq-server-0" Oct 03 13:48:32 crc kubenswrapper[4861]: I1003 13:48:32.539270 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/b9bf9ee1-8038-4578-b10d-390a82c11290-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"b9bf9ee1-8038-4578-b10d-390a82c11290\") " pod="openstack/rabbitmq-server-0" Oct 03 13:48:32 crc kubenswrapper[4861]: I1003 13:48:32.539304 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/b9bf9ee1-8038-4578-b10d-390a82c11290-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"b9bf9ee1-8038-4578-b10d-390a82c11290\") " pod="openstack/rabbitmq-server-0" Oct 03 13:48:32 crc kubenswrapper[4861]: I1003 13:48:32.539348 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/b9bf9ee1-8038-4578-b10d-390a82c11290-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"b9bf9ee1-8038-4578-b10d-390a82c11290\") " pod="openstack/rabbitmq-server-0" Oct 03 13:48:32 crc kubenswrapper[4861]: I1003 13:48:32.539371 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: 
\"kubernetes.io/downward-api/b9bf9ee1-8038-4578-b10d-390a82c11290-pod-info\") pod \"rabbitmq-server-0\" (UID: \"b9bf9ee1-8038-4578-b10d-390a82c11290\") " pod="openstack/rabbitmq-server-0" Oct 03 13:48:32 crc kubenswrapper[4861]: I1003 13:48:32.539486 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/b9bf9ee1-8038-4578-b10d-390a82c11290-server-conf\") pod \"rabbitmq-server-0\" (UID: \"b9bf9ee1-8038-4578-b10d-390a82c11290\") " pod="openstack/rabbitmq-server-0" Oct 03 13:48:32 crc kubenswrapper[4861]: I1003 13:48:32.539545 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/b9bf9ee1-8038-4578-b10d-390a82c11290-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"b9bf9ee1-8038-4578-b10d-390a82c11290\") " pod="openstack/rabbitmq-server-0" Oct 03 13:48:32 crc kubenswrapper[4861]: I1003 13:48:32.539594 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/b9bf9ee1-8038-4578-b10d-390a82c11290-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"b9bf9ee1-8038-4578-b10d-390a82c11290\") " pod="openstack/rabbitmq-server-0" Oct 03 13:48:32 crc kubenswrapper[4861]: I1003 13:48:32.539647 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-server-0\" (UID: \"b9bf9ee1-8038-4578-b10d-390a82c11290\") " pod="openstack/rabbitmq-server-0" Oct 03 13:48:32 crc kubenswrapper[4861]: I1003 13:48:32.539663 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/b9bf9ee1-8038-4578-b10d-390a82c11290-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"b9bf9ee1-8038-4578-b10d-390a82c11290\") " pod="openstack/rabbitmq-server-0" Oct 03 13:48:32 crc kubenswrapper[4861]: I1003 13:48:32.641108 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j9jpr\" (UniqueName: \"kubernetes.io/projected/b9bf9ee1-8038-4578-b10d-390a82c11290-kube-api-access-j9jpr\") pod \"rabbitmq-server-0\" (UID: \"b9bf9ee1-8038-4578-b10d-390a82c11290\") " pod="openstack/rabbitmq-server-0" Oct 03 13:48:32 crc kubenswrapper[4861]: I1003 13:48:32.641455 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b9bf9ee1-8038-4578-b10d-390a82c11290-config-data\") pod \"rabbitmq-server-0\" (UID: \"b9bf9ee1-8038-4578-b10d-390a82c11290\") " pod="openstack/rabbitmq-server-0" Oct 03 13:48:32 crc kubenswrapper[4861]: I1003 13:48:32.641492 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/b9bf9ee1-8038-4578-b10d-390a82c11290-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"b9bf9ee1-8038-4578-b10d-390a82c11290\") " pod="openstack/rabbitmq-server-0" Oct 03 13:48:32 crc kubenswrapper[4861]: I1003 13:48:32.641521 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/b9bf9ee1-8038-4578-b10d-390a82c11290-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"b9bf9ee1-8038-4578-b10d-390a82c11290\") " 
pod="openstack/rabbitmq-server-0" Oct 03 13:48:32 crc kubenswrapper[4861]: I1003 13:48:32.642329 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/b9bf9ee1-8038-4578-b10d-390a82c11290-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"b9bf9ee1-8038-4578-b10d-390a82c11290\") " pod="openstack/rabbitmq-server-0" Oct 03 13:48:32 crc kubenswrapper[4861]: I1003 13:48:32.642372 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/b9bf9ee1-8038-4578-b10d-390a82c11290-pod-info\") pod \"rabbitmq-server-0\" (UID: \"b9bf9ee1-8038-4578-b10d-390a82c11290\") " pod="openstack/rabbitmq-server-0" Oct 03 13:48:32 crc kubenswrapper[4861]: I1003 13:48:32.642339 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/b9bf9ee1-8038-4578-b10d-390a82c11290-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"b9bf9ee1-8038-4578-b10d-390a82c11290\") " pod="openstack/rabbitmq-server-0" Oct 03 13:48:32 crc kubenswrapper[4861]: I1003 13:48:32.642457 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/b9bf9ee1-8038-4578-b10d-390a82c11290-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"b9bf9ee1-8038-4578-b10d-390a82c11290\") " pod="openstack/rabbitmq-server-0" Oct 03 13:48:32 crc kubenswrapper[4861]: I1003 13:48:32.642477 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/b9bf9ee1-8038-4578-b10d-390a82c11290-server-conf\") pod \"rabbitmq-server-0\" (UID: \"b9bf9ee1-8038-4578-b10d-390a82c11290\") " pod="openstack/rabbitmq-server-0" Oct 03 13:48:32 crc kubenswrapper[4861]: I1003 13:48:32.642543 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/b9bf9ee1-8038-4578-b10d-390a82c11290-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"b9bf9ee1-8038-4578-b10d-390a82c11290\") " pod="openstack/rabbitmq-server-0" Oct 03 13:48:32 crc kubenswrapper[4861]: I1003 13:48:32.642616 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-server-0\" (UID: \"b9bf9ee1-8038-4578-b10d-390a82c11290\") " pod="openstack/rabbitmq-server-0" Oct 03 13:48:32 crc kubenswrapper[4861]: I1003 13:48:32.642639 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/b9bf9ee1-8038-4578-b10d-390a82c11290-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"b9bf9ee1-8038-4578-b10d-390a82c11290\") " pod="openstack/rabbitmq-server-0" Oct 03 13:48:32 crc kubenswrapper[4861]: I1003 13:48:32.643139 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b9bf9ee1-8038-4578-b10d-390a82c11290-config-data\") pod \"rabbitmq-server-0\" (UID: \"b9bf9ee1-8038-4578-b10d-390a82c11290\") " pod="openstack/rabbitmq-server-0" Oct 03 13:48:32 crc kubenswrapper[4861]: I1003 13:48:32.643599 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/b9bf9ee1-8038-4578-b10d-390a82c11290-plugins-conf\") pod 
\"rabbitmq-server-0\" (UID: \"b9bf9ee1-8038-4578-b10d-390a82c11290\") " pod="openstack/rabbitmq-server-0" Oct 03 13:48:32 crc kubenswrapper[4861]: I1003 13:48:32.643730 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/b9bf9ee1-8038-4578-b10d-390a82c11290-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"b9bf9ee1-8038-4578-b10d-390a82c11290\") " pod="openstack/rabbitmq-server-0" Oct 03 13:48:32 crc kubenswrapper[4861]: I1003 13:48:32.643962 4861 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-server-0\" (UID: \"b9bf9ee1-8038-4578-b10d-390a82c11290\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/rabbitmq-server-0" Oct 03 13:48:32 crc kubenswrapper[4861]: I1003 13:48:32.645479 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/b9bf9ee1-8038-4578-b10d-390a82c11290-server-conf\") pod \"rabbitmq-server-0\" (UID: \"b9bf9ee1-8038-4578-b10d-390a82c11290\") " pod="openstack/rabbitmq-server-0" Oct 03 13:48:32 crc kubenswrapper[4861]: I1003 13:48:32.645979 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/b9bf9ee1-8038-4578-b10d-390a82c11290-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"b9bf9ee1-8038-4578-b10d-390a82c11290\") " pod="openstack/rabbitmq-server-0" Oct 03 13:48:32 crc kubenswrapper[4861]: I1003 13:48:32.647487 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/b9bf9ee1-8038-4578-b10d-390a82c11290-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"b9bf9ee1-8038-4578-b10d-390a82c11290\") " pod="openstack/rabbitmq-server-0" Oct 03 13:48:32 crc kubenswrapper[4861]: I1003 13:48:32.648583 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/b9bf9ee1-8038-4578-b10d-390a82c11290-pod-info\") pod \"rabbitmq-server-0\" (UID: \"b9bf9ee1-8038-4578-b10d-390a82c11290\") " pod="openstack/rabbitmq-server-0" Oct 03 13:48:32 crc kubenswrapper[4861]: I1003 13:48:32.659342 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/b9bf9ee1-8038-4578-b10d-390a82c11290-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"b9bf9ee1-8038-4578-b10d-390a82c11290\") " pod="openstack/rabbitmq-server-0" Oct 03 13:48:32 crc kubenswrapper[4861]: I1003 13:48:32.668814 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j9jpr\" (UniqueName: \"kubernetes.io/projected/b9bf9ee1-8038-4578-b10d-390a82c11290-kube-api-access-j9jpr\") pod \"rabbitmq-server-0\" (UID: \"b9bf9ee1-8038-4578-b10d-390a82c11290\") " pod="openstack/rabbitmq-server-0" Oct 03 13:48:32 crc kubenswrapper[4861]: I1003 13:48:32.679297 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-server-0\" (UID: \"b9bf9ee1-8038-4578-b10d-390a82c11290\") " pod="openstack/rabbitmq-server-0" Oct 03 13:48:32 crc kubenswrapper[4861]: I1003 13:48:32.772128 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 03 13:48:32 crc kubenswrapper[4861]: I1003 13:48:32.969103 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 03 13:48:33 crc kubenswrapper[4861]: I1003 13:48:33.030757 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-2ntbd" event={"ID":"23e02e1d-0ff1-4ce2-a299-5802ec3177cd","Type":"ContainerStarted","Data":"4da56b6dc594357510dfa410227eddaedac1ece1a8db396d1698d6f54fa1e160"} Oct 03 13:48:33 crc kubenswrapper[4861]: I1003 13:48:33.036906 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5ccc8479f9-dzkq2" event={"ID":"bc8b9936-3237-4bdf-99b5-94c8154217dc","Type":"ContainerStarted","Data":"29b2e3afe7f0330942ec8584c9fe0fc620d7b1daf252a262d3e0452d15b5b073"} Oct 03 13:48:33 crc kubenswrapper[4861]: I1003 13:48:33.039295 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"d537dadb-d98c-4ac3-ae54-fc0a9397d7d7","Type":"ContainerStarted","Data":"f1ea78a55403cb178ffafddbd1d03caff9e2ecb3c24fade79c89c4ecf8106838"} Oct 03 13:48:33 crc kubenswrapper[4861]: I1003 13:48:33.425826 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 03 13:48:33 crc kubenswrapper[4861]: W1003 13:48:33.580991 4861 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb9bf9ee1_8038_4578_b10d_390a82c11290.slice/crio-f5f96bd243c9a7efc6b1275195e8c08800f6a91192e5de63fda155ad336005b3 WatchSource:0}: Error finding container f5f96bd243c9a7efc6b1275195e8c08800f6a91192e5de63fda155ad336005b3: Status 404 returned error can't find the container with id f5f96bd243c9a7efc6b1275195e8c08800f6a91192e5de63fda155ad336005b3 Oct 03 13:48:34 crc kubenswrapper[4861]: I1003 13:48:34.060061 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"b9bf9ee1-8038-4578-b10d-390a82c11290","Type":"ContainerStarted","Data":"f5f96bd243c9a7efc6b1275195e8c08800f6a91192e5de63fda155ad336005b3"} Oct 03 13:48:34 crc kubenswrapper[4861]: I1003 13:48:34.869378 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"] Oct 03 13:48:34 crc kubenswrapper[4861]: I1003 13:48:34.871869 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-galera-0" Oct 03 13:48:34 crc kubenswrapper[4861]: I1003 13:48:34.875207 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Oct 03 13:48:34 crc kubenswrapper[4861]: I1003 13:48:34.880102 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Oct 03 13:48:34 crc kubenswrapper[4861]: I1003 13:48:34.881792 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data" Oct 03 13:48:34 crc kubenswrapper[4861]: I1003 13:48:34.883701 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-9lf5x" Oct 03 13:48:34 crc kubenswrapper[4861]: I1003 13:48:34.883850 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts" Oct 03 13:48:34 crc kubenswrapper[4861]: I1003 13:48:34.883967 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc" Oct 03 13:48:34 crc kubenswrapper[4861]: I1003 13:48:34.890690 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle" Oct 03 13:48:34 crc kubenswrapper[4861]: I1003 13:48:34.968979 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"] Oct 03 13:48:34 crc kubenswrapper[4861]: I1003 13:48:34.970686 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Oct 03 13:48:34 crc kubenswrapper[4861]: I1003 13:48:34.976197 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-pjv4v" Oct 03 13:48:34 crc kubenswrapper[4861]: I1003 13:48:34.976366 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data" Oct 03 13:48:34 crc kubenswrapper[4861]: I1003 13:48:34.976604 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc" Oct 03 13:48:34 crc kubenswrapper[4861]: I1003 13:48:34.979081 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts" Oct 03 13:48:34 crc kubenswrapper[4861]: I1003 13:48:34.986502 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-galera-0\" (UID: \"e729e54c-b7bc-46e8-94c8-ef5d8a4f42b1\") " pod="openstack/openstack-galera-0" Oct 03 13:48:34 crc kubenswrapper[4861]: I1003 13:48:34.986564 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/e729e54c-b7bc-46e8-94c8-ef5d8a4f42b1-secrets\") pod \"openstack-galera-0\" (UID: \"e729e54c-b7bc-46e8-94c8-ef5d8a4f42b1\") " pod="openstack/openstack-galera-0" Oct 03 13:48:34 crc kubenswrapper[4861]: I1003 13:48:34.986623 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dqpp7\" (UniqueName: \"kubernetes.io/projected/e729e54c-b7bc-46e8-94c8-ef5d8a4f42b1-kube-api-access-dqpp7\") pod \"openstack-galera-0\" (UID: \"e729e54c-b7bc-46e8-94c8-ef5d8a4f42b1\") " pod="openstack/openstack-galera-0" Oct 03 13:48:34 crc kubenswrapper[4861]: I1003 13:48:34.990973 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/e729e54c-b7bc-46e8-94c8-ef5d8a4f42b1-config-data-generated\") pod \"openstack-galera-0\" (UID: \"e729e54c-b7bc-46e8-94c8-ef5d8a4f42b1\") " pod="openstack/openstack-galera-0" Oct 03 13:48:34 crc kubenswrapper[4861]: I1003 13:48:34.991037 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e729e54c-b7bc-46e8-94c8-ef5d8a4f42b1-operator-scripts\") pod \"openstack-galera-0\" (UID: \"e729e54c-b7bc-46e8-94c8-ef5d8a4f42b1\") " pod="openstack/openstack-galera-0" Oct 03 13:48:34 crc kubenswrapper[4861]: I1003 13:48:34.991123 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/e729e54c-b7bc-46e8-94c8-ef5d8a4f42b1-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"e729e54c-b7bc-46e8-94c8-ef5d8a4f42b1\") " pod="openstack/openstack-galera-0" Oct 03 13:48:34 crc kubenswrapper[4861]: I1003 13:48:34.991168 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/e729e54c-b7bc-46e8-94c8-ef5d8a4f42b1-kolla-config\") pod \"openstack-galera-0\" (UID: \"e729e54c-b7bc-46e8-94c8-ef5d8a4f42b1\") " pod="openstack/openstack-galera-0" Oct 03 13:48:34 crc kubenswrapper[4861]: I1003 13:48:34.991201 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e729e54c-b7bc-46e8-94c8-ef5d8a4f42b1-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"e729e54c-b7bc-46e8-94c8-ef5d8a4f42b1\") " pod="openstack/openstack-galera-0" Oct 03 13:48:34 crc kubenswrapper[4861]: I1003 13:48:34.991256 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/e729e54c-b7bc-46e8-94c8-ef5d8a4f42b1-config-data-default\") pod \"openstack-galera-0\" (UID: \"e729e54c-b7bc-46e8-94c8-ef5d8a4f42b1\") " pod="openstack/openstack-galera-0" Oct 03 13:48:35 crc kubenswrapper[4861]: I1003 13:48:35.006685 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Oct 03 13:48:35 crc kubenswrapper[4861]: I1003 13:48:35.094246 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/e729e54c-b7bc-46e8-94c8-ef5d8a4f42b1-kolla-config\") pod \"openstack-galera-0\" (UID: \"e729e54c-b7bc-46e8-94c8-ef5d8a4f42b1\") " pod="openstack/openstack-galera-0" Oct 03 13:48:35 crc kubenswrapper[4861]: I1003 13:48:35.094300 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/cc0949fe-630b-4f65-9c6c-7a87272586a2-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"cc0949fe-630b-4f65-9c6c-7a87272586a2\") " pod="openstack/openstack-cell1-galera-0" Oct 03 13:48:35 crc kubenswrapper[4861]: I1003 13:48:35.094327 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e729e54c-b7bc-46e8-94c8-ef5d8a4f42b1-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"e729e54c-b7bc-46e8-94c8-ef5d8a4f42b1\") " pod="openstack/openstack-galera-0" Oct 03 13:48:35 crc kubenswrapper[4861]: I1003 13:48:35.094506 4861 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/e729e54c-b7bc-46e8-94c8-ef5d8a4f42b1-config-data-default\") pod \"openstack-galera-0\" (UID: \"e729e54c-b7bc-46e8-94c8-ef5d8a4f42b1\") " pod="openstack/openstack-galera-0" Oct 03 13:48:35 crc kubenswrapper[4861]: I1003 13:48:35.094582 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"openstack-cell1-galera-0\" (UID: \"cc0949fe-630b-4f65-9c6c-7a87272586a2\") " pod="openstack/openstack-cell1-galera-0" Oct 03 13:48:35 crc kubenswrapper[4861]: I1003 13:48:35.094608 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/cc0949fe-630b-4f65-9c6c-7a87272586a2-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"cc0949fe-630b-4f65-9c6c-7a87272586a2\") " pod="openstack/openstack-cell1-galera-0" Oct 03 13:48:35 crc kubenswrapper[4861]: I1003 13:48:35.094644 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/cc0949fe-630b-4f65-9c6c-7a87272586a2-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"cc0949fe-630b-4f65-9c6c-7a87272586a2\") " pod="openstack/openstack-cell1-galera-0" Oct 03 13:48:35 crc kubenswrapper[4861]: I1003 13:48:35.094684 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-galera-0\" (UID: \"e729e54c-b7bc-46e8-94c8-ef5d8a4f42b1\") " pod="openstack/openstack-galera-0" Oct 03 13:48:35 crc kubenswrapper[4861]: I1003 13:48:35.094717 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/e729e54c-b7bc-46e8-94c8-ef5d8a4f42b1-secrets\") pod \"openstack-galera-0\" (UID: \"e729e54c-b7bc-46e8-94c8-ef5d8a4f42b1\") " pod="openstack/openstack-galera-0" Oct 03 13:48:35 crc kubenswrapper[4861]: I1003 13:48:35.094756 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cc0949fe-630b-4f65-9c6c-7a87272586a2-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"cc0949fe-630b-4f65-9c6c-7a87272586a2\") " pod="openstack/openstack-cell1-galera-0" Oct 03 13:48:35 crc kubenswrapper[4861]: I1003 13:48:35.094782 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/cc0949fe-630b-4f65-9c6c-7a87272586a2-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"cc0949fe-630b-4f65-9c6c-7a87272586a2\") " pod="openstack/openstack-cell1-galera-0" Oct 03 13:48:35 crc kubenswrapper[4861]: I1003 13:48:35.094811 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dqpp7\" (UniqueName: \"kubernetes.io/projected/e729e54c-b7bc-46e8-94c8-ef5d8a4f42b1-kube-api-access-dqpp7\") pod \"openstack-galera-0\" (UID: \"e729e54c-b7bc-46e8-94c8-ef5d8a4f42b1\") " pod="openstack/openstack-galera-0" Oct 03 13:48:35 crc kubenswrapper[4861]: I1003 13:48:35.094857 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/cc0949fe-630b-4f65-9c6c-7a87272586a2-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"cc0949fe-630b-4f65-9c6c-7a87272586a2\") " pod="openstack/openstack-cell1-galera-0" Oct 03 13:48:35 crc kubenswrapper[4861]: I1003 13:48:35.094887 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/e729e54c-b7bc-46e8-94c8-ef5d8a4f42b1-config-data-generated\") pod \"openstack-galera-0\" (UID: \"e729e54c-b7bc-46e8-94c8-ef5d8a4f42b1\") " pod="openstack/openstack-galera-0" Oct 03 13:48:35 crc kubenswrapper[4861]: I1003 13:48:35.094913 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f9tlb\" (UniqueName: \"kubernetes.io/projected/cc0949fe-630b-4f65-9c6c-7a87272586a2-kube-api-access-f9tlb\") pod \"openstack-cell1-galera-0\" (UID: \"cc0949fe-630b-4f65-9c6c-7a87272586a2\") " pod="openstack/openstack-cell1-galera-0" Oct 03 13:48:35 crc kubenswrapper[4861]: I1003 13:48:35.094939 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e729e54c-b7bc-46e8-94c8-ef5d8a4f42b1-operator-scripts\") pod \"openstack-galera-0\" (UID: \"e729e54c-b7bc-46e8-94c8-ef5d8a4f42b1\") " pod="openstack/openstack-galera-0" Oct 03 13:48:35 crc kubenswrapper[4861]: I1003 13:48:35.094963 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc0949fe-630b-4f65-9c6c-7a87272586a2-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"cc0949fe-630b-4f65-9c6c-7a87272586a2\") " pod="openstack/openstack-cell1-galera-0" Oct 03 13:48:35 crc kubenswrapper[4861]: I1003 13:48:35.094991 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/e729e54c-b7bc-46e8-94c8-ef5d8a4f42b1-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"e729e54c-b7bc-46e8-94c8-ef5d8a4f42b1\") " pod="openstack/openstack-galera-0" Oct 03 13:48:35 crc kubenswrapper[4861]: I1003 13:48:35.095064 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/e729e54c-b7bc-46e8-94c8-ef5d8a4f42b1-kolla-config\") pod \"openstack-galera-0\" (UID: \"e729e54c-b7bc-46e8-94c8-ef5d8a4f42b1\") " pod="openstack/openstack-galera-0" Oct 03 13:48:35 crc kubenswrapper[4861]: I1003 13:48:35.095626 4861 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-galera-0\" (UID: \"e729e54c-b7bc-46e8-94c8-ef5d8a4f42b1\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/openstack-galera-0" Oct 03 13:48:35 crc kubenswrapper[4861]: I1003 13:48:35.095938 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/e729e54c-b7bc-46e8-94c8-ef5d8a4f42b1-config-data-default\") pod \"openstack-galera-0\" (UID: \"e729e54c-b7bc-46e8-94c8-ef5d8a4f42b1\") " pod="openstack/openstack-galera-0" Oct 03 13:48:35 crc kubenswrapper[4861]: I1003 13:48:35.096510 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: 
\"kubernetes.io/empty-dir/e729e54c-b7bc-46e8-94c8-ef5d8a4f42b1-config-data-generated\") pod \"openstack-galera-0\" (UID: \"e729e54c-b7bc-46e8-94c8-ef5d8a4f42b1\") " pod="openstack/openstack-galera-0" Oct 03 13:48:35 crc kubenswrapper[4861]: I1003 13:48:35.099369 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e729e54c-b7bc-46e8-94c8-ef5d8a4f42b1-operator-scripts\") pod \"openstack-galera-0\" (UID: \"e729e54c-b7bc-46e8-94c8-ef5d8a4f42b1\") " pod="openstack/openstack-galera-0" Oct 03 13:48:35 crc kubenswrapper[4861]: I1003 13:48:35.102811 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/e729e54c-b7bc-46e8-94c8-ef5d8a4f42b1-secrets\") pod \"openstack-galera-0\" (UID: \"e729e54c-b7bc-46e8-94c8-ef5d8a4f42b1\") " pod="openstack/openstack-galera-0" Oct 03 13:48:35 crc kubenswrapper[4861]: I1003 13:48:35.116748 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/e729e54c-b7bc-46e8-94c8-ef5d8a4f42b1-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"e729e54c-b7bc-46e8-94c8-ef5d8a4f42b1\") " pod="openstack/openstack-galera-0" Oct 03 13:48:35 crc kubenswrapper[4861]: I1003 13:48:35.135488 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dqpp7\" (UniqueName: \"kubernetes.io/projected/e729e54c-b7bc-46e8-94c8-ef5d8a4f42b1-kube-api-access-dqpp7\") pod \"openstack-galera-0\" (UID: \"e729e54c-b7bc-46e8-94c8-ef5d8a4f42b1\") " pod="openstack/openstack-galera-0" Oct 03 13:48:35 crc kubenswrapper[4861]: I1003 13:48:35.140255 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-galera-0\" (UID: \"e729e54c-b7bc-46e8-94c8-ef5d8a4f42b1\") " pod="openstack/openstack-galera-0" Oct 03 13:48:35 crc kubenswrapper[4861]: I1003 13:48:35.140952 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e729e54c-b7bc-46e8-94c8-ef5d8a4f42b1-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"e729e54c-b7bc-46e8-94c8-ef5d8a4f42b1\") " pod="openstack/openstack-galera-0" Oct 03 13:48:35 crc kubenswrapper[4861]: I1003 13:48:35.190856 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-galera-0" Oct 03 13:48:35 crc kubenswrapper[4861]: I1003 13:48:35.197152 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/cc0949fe-630b-4f65-9c6c-7a87272586a2-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"cc0949fe-630b-4f65-9c6c-7a87272586a2\") " pod="openstack/openstack-cell1-galera-0" Oct 03 13:48:35 crc kubenswrapper[4861]: I1003 13:48:35.197878 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"openstack-cell1-galera-0\" (UID: \"cc0949fe-630b-4f65-9c6c-7a87272586a2\") " pod="openstack/openstack-cell1-galera-0" Oct 03 13:48:35 crc kubenswrapper[4861]: I1003 13:48:35.197904 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/cc0949fe-630b-4f65-9c6c-7a87272586a2-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"cc0949fe-630b-4f65-9c6c-7a87272586a2\") " pod="openstack/openstack-cell1-galera-0" Oct 03 13:48:35 crc kubenswrapper[4861]: I1003 13:48:35.197950 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/cc0949fe-630b-4f65-9c6c-7a87272586a2-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"cc0949fe-630b-4f65-9c6c-7a87272586a2\") " pod="openstack/openstack-cell1-galera-0" Oct 03 13:48:35 crc kubenswrapper[4861]: I1003 13:48:35.198204 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cc0949fe-630b-4f65-9c6c-7a87272586a2-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"cc0949fe-630b-4f65-9c6c-7a87272586a2\") " pod="openstack/openstack-cell1-galera-0" Oct 03 13:48:35 crc kubenswrapper[4861]: I1003 13:48:35.198250 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/cc0949fe-630b-4f65-9c6c-7a87272586a2-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"cc0949fe-630b-4f65-9c6c-7a87272586a2\") " pod="openstack/openstack-cell1-galera-0" Oct 03 13:48:35 crc kubenswrapper[4861]: I1003 13:48:35.198316 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/cc0949fe-630b-4f65-9c6c-7a87272586a2-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"cc0949fe-630b-4f65-9c6c-7a87272586a2\") " pod="openstack/openstack-cell1-galera-0" Oct 03 13:48:35 crc kubenswrapper[4861]: I1003 13:48:35.198346 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f9tlb\" (UniqueName: \"kubernetes.io/projected/cc0949fe-630b-4f65-9c6c-7a87272586a2-kube-api-access-f9tlb\") pod \"openstack-cell1-galera-0\" (UID: \"cc0949fe-630b-4f65-9c6c-7a87272586a2\") " pod="openstack/openstack-cell1-galera-0" Oct 03 13:48:35 crc kubenswrapper[4861]: I1003 13:48:35.198377 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc0949fe-630b-4f65-9c6c-7a87272586a2-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"cc0949fe-630b-4f65-9c6c-7a87272586a2\") " pod="openstack/openstack-cell1-galera-0" Oct 03 13:48:35 crc kubenswrapper[4861]: I1003 13:48:35.198398 
4861 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"openstack-cell1-galera-0\" (UID: \"cc0949fe-630b-4f65-9c6c-7a87272586a2\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/openstack-cell1-galera-0" Oct 03 13:48:35 crc kubenswrapper[4861]: I1003 13:48:35.199202 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/cc0949fe-630b-4f65-9c6c-7a87272586a2-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"cc0949fe-630b-4f65-9c6c-7a87272586a2\") " pod="openstack/openstack-cell1-galera-0" Oct 03 13:48:35 crc kubenswrapper[4861]: I1003 13:48:35.201071 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/cc0949fe-630b-4f65-9c6c-7a87272586a2-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"cc0949fe-630b-4f65-9c6c-7a87272586a2\") " pod="openstack/openstack-cell1-galera-0" Oct 03 13:48:35 crc kubenswrapper[4861]: I1003 13:48:35.201452 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cc0949fe-630b-4f65-9c6c-7a87272586a2-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"cc0949fe-630b-4f65-9c6c-7a87272586a2\") " pod="openstack/openstack-cell1-galera-0" Oct 03 13:48:35 crc kubenswrapper[4861]: I1003 13:48:35.206345 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/cc0949fe-630b-4f65-9c6c-7a87272586a2-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"cc0949fe-630b-4f65-9c6c-7a87272586a2\") " pod="openstack/openstack-cell1-galera-0" Oct 03 13:48:35 crc kubenswrapper[4861]: I1003 13:48:35.216697 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/cc0949fe-630b-4f65-9c6c-7a87272586a2-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"cc0949fe-630b-4f65-9c6c-7a87272586a2\") " pod="openstack/openstack-cell1-galera-0" Oct 03 13:48:35 crc kubenswrapper[4861]: I1003 13:48:35.224860 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/cc0949fe-630b-4f65-9c6c-7a87272586a2-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"cc0949fe-630b-4f65-9c6c-7a87272586a2\") " pod="openstack/openstack-cell1-galera-0" Oct 03 13:48:35 crc kubenswrapper[4861]: I1003 13:48:35.225788 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f9tlb\" (UniqueName: \"kubernetes.io/projected/cc0949fe-630b-4f65-9c6c-7a87272586a2-kube-api-access-f9tlb\") pod \"openstack-cell1-galera-0\" (UID: \"cc0949fe-630b-4f65-9c6c-7a87272586a2\") " pod="openstack/openstack-cell1-galera-0" Oct 03 13:48:35 crc kubenswrapper[4861]: I1003 13:48:35.228124 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc0949fe-630b-4f65-9c6c-7a87272586a2-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"cc0949fe-630b-4f65-9c6c-7a87272586a2\") " pod="openstack/openstack-cell1-galera-0" Oct 03 13:48:35 crc kubenswrapper[4861]: I1003 13:48:35.253680 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage08-crc\") pod \"openstack-cell1-galera-0\" (UID: \"cc0949fe-630b-4f65-9c6c-7a87272586a2\") " pod="openstack/openstack-cell1-galera-0" Oct 03 13:48:35 crc kubenswrapper[4861]: I1003 13:48:35.310615 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Oct 03 13:48:35 crc kubenswrapper[4861]: I1003 13:48:35.579441 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"] Oct 03 13:48:35 crc kubenswrapper[4861]: I1003 13:48:35.580561 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Oct 03 13:48:35 crc kubenswrapper[4861]: I1003 13:48:35.584630 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Oct 03 13:48:35 crc kubenswrapper[4861]: I1003 13:48:35.620888 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-memcached-svc" Oct 03 13:48:35 crc kubenswrapper[4861]: I1003 13:48:35.621180 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data" Oct 03 13:48:35 crc kubenswrapper[4861]: I1003 13:48:35.621366 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-dpn6p" Oct 03 13:48:35 crc kubenswrapper[4861]: I1003 13:48:35.724992 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7464ed9c-8f08-4c7a-8ca3-8a57734bd31f-combined-ca-bundle\") pod \"memcached-0\" (UID: \"7464ed9c-8f08-4c7a-8ca3-8a57734bd31f\") " pod="openstack/memcached-0" Oct 03 13:48:35 crc kubenswrapper[4861]: I1003 13:48:35.725048 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/7464ed9c-8f08-4c7a-8ca3-8a57734bd31f-memcached-tls-certs\") pod \"memcached-0\" (UID: \"7464ed9c-8f08-4c7a-8ca3-8a57734bd31f\") " pod="openstack/memcached-0" Oct 03 13:48:35 crc kubenswrapper[4861]: I1003 13:48:35.725067 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/7464ed9c-8f08-4c7a-8ca3-8a57734bd31f-kolla-config\") pod \"memcached-0\" (UID: \"7464ed9c-8f08-4c7a-8ca3-8a57734bd31f\") " pod="openstack/memcached-0" Oct 03 13:48:35 crc kubenswrapper[4861]: I1003 13:48:35.725115 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-966jn\" (UniqueName: \"kubernetes.io/projected/7464ed9c-8f08-4c7a-8ca3-8a57734bd31f-kube-api-access-966jn\") pod \"memcached-0\" (UID: \"7464ed9c-8f08-4c7a-8ca3-8a57734bd31f\") " pod="openstack/memcached-0" Oct 03 13:48:35 crc kubenswrapper[4861]: I1003 13:48:35.725187 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7464ed9c-8f08-4c7a-8ca3-8a57734bd31f-config-data\") pod \"memcached-0\" (UID: \"7464ed9c-8f08-4c7a-8ca3-8a57734bd31f\") " pod="openstack/memcached-0" Oct 03 13:48:35 crc kubenswrapper[4861]: I1003 13:48:35.826968 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7464ed9c-8f08-4c7a-8ca3-8a57734bd31f-combined-ca-bundle\") pod \"memcached-0\" (UID: \"7464ed9c-8f08-4c7a-8ca3-8a57734bd31f\") " 
pod="openstack/memcached-0" Oct 03 13:48:35 crc kubenswrapper[4861]: I1003 13:48:35.827045 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/7464ed9c-8f08-4c7a-8ca3-8a57734bd31f-memcached-tls-certs\") pod \"memcached-0\" (UID: \"7464ed9c-8f08-4c7a-8ca3-8a57734bd31f\") " pod="openstack/memcached-0" Oct 03 13:48:35 crc kubenswrapper[4861]: I1003 13:48:35.827076 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/7464ed9c-8f08-4c7a-8ca3-8a57734bd31f-kolla-config\") pod \"memcached-0\" (UID: \"7464ed9c-8f08-4c7a-8ca3-8a57734bd31f\") " pod="openstack/memcached-0" Oct 03 13:48:35 crc kubenswrapper[4861]: I1003 13:48:35.827152 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-966jn\" (UniqueName: \"kubernetes.io/projected/7464ed9c-8f08-4c7a-8ca3-8a57734bd31f-kube-api-access-966jn\") pod \"memcached-0\" (UID: \"7464ed9c-8f08-4c7a-8ca3-8a57734bd31f\") " pod="openstack/memcached-0" Oct 03 13:48:35 crc kubenswrapper[4861]: I1003 13:48:35.827207 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7464ed9c-8f08-4c7a-8ca3-8a57734bd31f-config-data\") pod \"memcached-0\" (UID: \"7464ed9c-8f08-4c7a-8ca3-8a57734bd31f\") " pod="openstack/memcached-0" Oct 03 13:48:35 crc kubenswrapper[4861]: I1003 13:48:35.837187 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/7464ed9c-8f08-4c7a-8ca3-8a57734bd31f-kolla-config\") pod \"memcached-0\" (UID: \"7464ed9c-8f08-4c7a-8ca3-8a57734bd31f\") " pod="openstack/memcached-0" Oct 03 13:48:35 crc kubenswrapper[4861]: I1003 13:48:35.837673 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7464ed9c-8f08-4c7a-8ca3-8a57734bd31f-config-data\") pod \"memcached-0\" (UID: \"7464ed9c-8f08-4c7a-8ca3-8a57734bd31f\") " pod="openstack/memcached-0" Oct 03 13:48:35 crc kubenswrapper[4861]: I1003 13:48:35.848156 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7464ed9c-8f08-4c7a-8ca3-8a57734bd31f-combined-ca-bundle\") pod \"memcached-0\" (UID: \"7464ed9c-8f08-4c7a-8ca3-8a57734bd31f\") " pod="openstack/memcached-0" Oct 03 13:48:35 crc kubenswrapper[4861]: I1003 13:48:35.855176 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/7464ed9c-8f08-4c7a-8ca3-8a57734bd31f-memcached-tls-certs\") pod \"memcached-0\" (UID: \"7464ed9c-8f08-4c7a-8ca3-8a57734bd31f\") " pod="openstack/memcached-0" Oct 03 13:48:35 crc kubenswrapper[4861]: I1003 13:48:35.866918 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-966jn\" (UniqueName: \"kubernetes.io/projected/7464ed9c-8f08-4c7a-8ca3-8a57734bd31f-kube-api-access-966jn\") pod \"memcached-0\" (UID: \"7464ed9c-8f08-4c7a-8ca3-8a57734bd31f\") " pod="openstack/memcached-0" Oct 03 13:48:35 crc kubenswrapper[4861]: I1003 13:48:35.949297 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/memcached-0" Oct 03 13:48:37 crc kubenswrapper[4861]: I1003 13:48:37.459479 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Oct 03 13:48:37 crc kubenswrapper[4861]: I1003 13:48:37.466623 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 03 13:48:37 crc kubenswrapper[4861]: I1003 13:48:37.481136 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-ltdpl" Oct 03 13:48:37 crc kubenswrapper[4861]: I1003 13:48:37.499922 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 03 13:48:37 crc kubenswrapper[4861]: I1003 13:48:37.570388 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2twt6\" (UniqueName: \"kubernetes.io/projected/f6bf9658-85f4-4e0b-9aa6-cf672e04a858-kube-api-access-2twt6\") pod \"kube-state-metrics-0\" (UID: \"f6bf9658-85f4-4e0b-9aa6-cf672e04a858\") " pod="openstack/kube-state-metrics-0" Oct 03 13:48:37 crc kubenswrapper[4861]: I1003 13:48:37.676459 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2twt6\" (UniqueName: \"kubernetes.io/projected/f6bf9658-85f4-4e0b-9aa6-cf672e04a858-kube-api-access-2twt6\") pod \"kube-state-metrics-0\" (UID: \"f6bf9658-85f4-4e0b-9aa6-cf672e04a858\") " pod="openstack/kube-state-metrics-0" Oct 03 13:48:37 crc kubenswrapper[4861]: I1003 13:48:37.724353 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2twt6\" (UniqueName: \"kubernetes.io/projected/f6bf9658-85f4-4e0b-9aa6-cf672e04a858-kube-api-access-2twt6\") pod \"kube-state-metrics-0\" (UID: \"f6bf9658-85f4-4e0b-9aa6-cf672e04a858\") " pod="openstack/kube-state-metrics-0" Oct 03 13:48:37 crc kubenswrapper[4861]: I1003 13:48:37.797533 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 03 13:48:40 crc kubenswrapper[4861]: I1003 13:48:40.612429 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"] Oct 03 13:48:40 crc kubenswrapper[4861]: I1003 13:48:40.614446 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-0" Oct 03 13:48:40 crc kubenswrapper[4861]: I1003 13:48:40.617551 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-nb-ovndbs" Oct 03 13:48:40 crc kubenswrapper[4861]: I1003 13:48:40.617685 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config" Oct 03 13:48:40 crc kubenswrapper[4861]: I1003 13:48:40.617803 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts" Oct 03 13:48:40 crc kubenswrapper[4861]: I1003 13:48:40.617815 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-7g7jl" Oct 03 13:48:40 crc kubenswrapper[4861]: I1003 13:48:40.619116 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovn-metrics" Oct 03 13:48:40 crc kubenswrapper[4861]: I1003 13:48:40.630888 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Oct 03 13:48:40 crc kubenswrapper[4861]: I1003 13:48:40.728243 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6714f489-ceb2-4b99-a61d-fe45289bed5f-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"6714f489-ceb2-4b99-a61d-fe45289bed5f\") " pod="openstack/ovsdbserver-nb-0" Oct 03 13:48:40 crc kubenswrapper[4861]: I1003 13:48:40.728298 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6714f489-ceb2-4b99-a61d-fe45289bed5f-config\") pod \"ovsdbserver-nb-0\" (UID: \"6714f489-ceb2-4b99-a61d-fe45289bed5f\") " pod="openstack/ovsdbserver-nb-0" Oct 03 13:48:40 crc kubenswrapper[4861]: I1003 13:48:40.728325 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"ovsdbserver-nb-0\" (UID: \"6714f489-ceb2-4b99-a61d-fe45289bed5f\") " pod="openstack/ovsdbserver-nb-0" Oct 03 13:48:40 crc kubenswrapper[4861]: I1003 13:48:40.728357 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/6714f489-ceb2-4b99-a61d-fe45289bed5f-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"6714f489-ceb2-4b99-a61d-fe45289bed5f\") " pod="openstack/ovsdbserver-nb-0" Oct 03 13:48:40 crc kubenswrapper[4861]: I1003 13:48:40.728400 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6714f489-ceb2-4b99-a61d-fe45289bed5f-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"6714f489-ceb2-4b99-a61d-fe45289bed5f\") " pod="openstack/ovsdbserver-nb-0" Oct 03 13:48:40 crc kubenswrapper[4861]: I1003 13:48:40.728427 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/6714f489-ceb2-4b99-a61d-fe45289bed5f-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"6714f489-ceb2-4b99-a61d-fe45289bed5f\") " pod="openstack/ovsdbserver-nb-0" Oct 03 13:48:40 crc kubenswrapper[4861]: I1003 13:48:40.728455 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/6714f489-ceb2-4b99-a61d-fe45289bed5f-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"6714f489-ceb2-4b99-a61d-fe45289bed5f\") " pod="openstack/ovsdbserver-nb-0" Oct 03 13:48:40 crc kubenswrapper[4861]: I1003 13:48:40.728474 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pdfmw\" (UniqueName: \"kubernetes.io/projected/6714f489-ceb2-4b99-a61d-fe45289bed5f-kube-api-access-pdfmw\") pod \"ovsdbserver-nb-0\" (UID: \"6714f489-ceb2-4b99-a61d-fe45289bed5f\") " pod="openstack/ovsdbserver-nb-0" Oct 03 13:48:40 crc kubenswrapper[4861]: I1003 13:48:40.829454 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6714f489-ceb2-4b99-a61d-fe45289bed5f-config\") pod \"ovsdbserver-nb-0\" (UID: \"6714f489-ceb2-4b99-a61d-fe45289bed5f\") " pod="openstack/ovsdbserver-nb-0" Oct 03 13:48:40 crc kubenswrapper[4861]: I1003 13:48:40.829516 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"ovsdbserver-nb-0\" (UID: \"6714f489-ceb2-4b99-a61d-fe45289bed5f\") " pod="openstack/ovsdbserver-nb-0" Oct 03 13:48:40 crc kubenswrapper[4861]: I1003 13:48:40.829567 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/6714f489-ceb2-4b99-a61d-fe45289bed5f-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"6714f489-ceb2-4b99-a61d-fe45289bed5f\") " pod="openstack/ovsdbserver-nb-0" Oct 03 13:48:40 crc kubenswrapper[4861]: I1003 13:48:40.829651 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6714f489-ceb2-4b99-a61d-fe45289bed5f-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"6714f489-ceb2-4b99-a61d-fe45289bed5f\") " pod="openstack/ovsdbserver-nb-0" Oct 03 13:48:40 crc kubenswrapper[4861]: I1003 13:48:40.829712 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/6714f489-ceb2-4b99-a61d-fe45289bed5f-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"6714f489-ceb2-4b99-a61d-fe45289bed5f\") " pod="openstack/ovsdbserver-nb-0" Oct 03 13:48:40 crc kubenswrapper[4861]: I1003 13:48:40.829738 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/6714f489-ceb2-4b99-a61d-fe45289bed5f-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"6714f489-ceb2-4b99-a61d-fe45289bed5f\") " pod="openstack/ovsdbserver-nb-0" Oct 03 13:48:40 crc kubenswrapper[4861]: I1003 13:48:40.829764 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pdfmw\" (UniqueName: \"kubernetes.io/projected/6714f489-ceb2-4b99-a61d-fe45289bed5f-kube-api-access-pdfmw\") pod \"ovsdbserver-nb-0\" (UID: \"6714f489-ceb2-4b99-a61d-fe45289bed5f\") " pod="openstack/ovsdbserver-nb-0" Oct 03 13:48:40 crc kubenswrapper[4861]: I1003 13:48:40.829905 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6714f489-ceb2-4b99-a61d-fe45289bed5f-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"6714f489-ceb2-4b99-a61d-fe45289bed5f\") " pod="openstack/ovsdbserver-nb-0" Oct 03 13:48:40 crc kubenswrapper[4861]: I1003 
13:48:40.830809 4861 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"ovsdbserver-nb-0\" (UID: \"6714f489-ceb2-4b99-a61d-fe45289bed5f\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/ovsdbserver-nb-0" Oct 03 13:48:40 crc kubenswrapper[4861]: I1003 13:48:40.830932 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/6714f489-ceb2-4b99-a61d-fe45289bed5f-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"6714f489-ceb2-4b99-a61d-fe45289bed5f\") " pod="openstack/ovsdbserver-nb-0" Oct 03 13:48:40 crc kubenswrapper[4861]: I1003 13:48:40.831298 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6714f489-ceb2-4b99-a61d-fe45289bed5f-config\") pod \"ovsdbserver-nb-0\" (UID: \"6714f489-ceb2-4b99-a61d-fe45289bed5f\") " pod="openstack/ovsdbserver-nb-0" Oct 03 13:48:40 crc kubenswrapper[4861]: I1003 13:48:40.832387 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6714f489-ceb2-4b99-a61d-fe45289bed5f-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"6714f489-ceb2-4b99-a61d-fe45289bed5f\") " pod="openstack/ovsdbserver-nb-0" Oct 03 13:48:40 crc kubenswrapper[4861]: I1003 13:48:40.846139 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/6714f489-ceb2-4b99-a61d-fe45289bed5f-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"6714f489-ceb2-4b99-a61d-fe45289bed5f\") " pod="openstack/ovsdbserver-nb-0" Oct 03 13:48:40 crc kubenswrapper[4861]: I1003 13:48:40.848463 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6714f489-ceb2-4b99-a61d-fe45289bed5f-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"6714f489-ceb2-4b99-a61d-fe45289bed5f\") " pod="openstack/ovsdbserver-nb-0" Oct 03 13:48:40 crc kubenswrapper[4861]: I1003 13:48:40.857799 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/6714f489-ceb2-4b99-a61d-fe45289bed5f-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"6714f489-ceb2-4b99-a61d-fe45289bed5f\") " pod="openstack/ovsdbserver-nb-0" Oct 03 13:48:40 crc kubenswrapper[4861]: I1003 13:48:40.870777 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"ovsdbserver-nb-0\" (UID: \"6714f489-ceb2-4b99-a61d-fe45289bed5f\") " pod="openstack/ovsdbserver-nb-0" Oct 03 13:48:40 crc kubenswrapper[4861]: I1003 13:48:40.874559 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pdfmw\" (UniqueName: \"kubernetes.io/projected/6714f489-ceb2-4b99-a61d-fe45289bed5f-kube-api-access-pdfmw\") pod \"ovsdbserver-nb-0\" (UID: \"6714f489-ceb2-4b99-a61d-fe45289bed5f\") " pod="openstack/ovsdbserver-nb-0" Oct 03 13:48:40 crc kubenswrapper[4861]: I1003 13:48:40.931580 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-x5wkc"] Oct 03 13:48:40 crc kubenswrapper[4861]: I1003 13:48:40.932780 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-x5wkc" Oct 03 13:48:40 crc kubenswrapper[4861]: I1003 13:48:40.935156 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovncontroller-ovndbs" Oct 03 13:48:40 crc kubenswrapper[4861]: I1003 13:48:40.935403 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-f6xpj" Oct 03 13:48:40 crc kubenswrapper[4861]: I1003 13:48:40.939620 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts" Oct 03 13:48:40 crc kubenswrapper[4861]: I1003 13:48:40.947423 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Oct 03 13:48:40 crc kubenswrapper[4861]: I1003 13:48:40.960767 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-x5wkc"] Oct 03 13:48:41 crc kubenswrapper[4861]: I1003 13:48:41.000059 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ovs-j8c4m"] Oct 03 13:48:41 crc kubenswrapper[4861]: I1003 13:48:41.001789 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-j8c4m" Oct 03 13:48:41 crc kubenswrapper[4861]: I1003 13:48:41.017286 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-j8c4m"] Oct 03 13:48:41 crc kubenswrapper[4861]: I1003 13:48:41.032352 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/ef2b767c-ef6a-4364-a3f0-14b68bee3986-ovn-controller-tls-certs\") pod \"ovn-controller-x5wkc\" (UID: \"ef2b767c-ef6a-4364-a3f0-14b68bee3986\") " pod="openstack/ovn-controller-x5wkc" Oct 03 13:48:41 crc kubenswrapper[4861]: I1003 13:48:41.032591 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ef2b767c-ef6a-4364-a3f0-14b68bee3986-scripts\") pod \"ovn-controller-x5wkc\" (UID: \"ef2b767c-ef6a-4364-a3f0-14b68bee3986\") " pod="openstack/ovn-controller-x5wkc" Oct 03 13:48:41 crc kubenswrapper[4861]: I1003 13:48:41.032694 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z4r2r\" (UniqueName: \"kubernetes.io/projected/ef2b767c-ef6a-4364-a3f0-14b68bee3986-kube-api-access-z4r2r\") pod \"ovn-controller-x5wkc\" (UID: \"ef2b767c-ef6a-4364-a3f0-14b68bee3986\") " pod="openstack/ovn-controller-x5wkc" Oct 03 13:48:41 crc kubenswrapper[4861]: I1003 13:48:41.032786 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/ef2b767c-ef6a-4364-a3f0-14b68bee3986-var-run-ovn\") pod \"ovn-controller-x5wkc\" (UID: \"ef2b767c-ef6a-4364-a3f0-14b68bee3986\") " pod="openstack/ovn-controller-x5wkc" Oct 03 13:48:41 crc kubenswrapper[4861]: I1003 13:48:41.032859 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/ef2b767c-ef6a-4364-a3f0-14b68bee3986-var-log-ovn\") pod \"ovn-controller-x5wkc\" (UID: \"ef2b767c-ef6a-4364-a3f0-14b68bee3986\") " pod="openstack/ovn-controller-x5wkc" Oct 03 13:48:41 crc kubenswrapper[4861]: I1003 13:48:41.032948 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" 
(UniqueName: \"kubernetes.io/host-path/ef2b767c-ef6a-4364-a3f0-14b68bee3986-var-run\") pod \"ovn-controller-x5wkc\" (UID: \"ef2b767c-ef6a-4364-a3f0-14b68bee3986\") " pod="openstack/ovn-controller-x5wkc" Oct 03 13:48:41 crc kubenswrapper[4861]: I1003 13:48:41.033067 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef2b767c-ef6a-4364-a3f0-14b68bee3986-combined-ca-bundle\") pod \"ovn-controller-x5wkc\" (UID: \"ef2b767c-ef6a-4364-a3f0-14b68bee3986\") " pod="openstack/ovn-controller-x5wkc" Oct 03 13:48:41 crc kubenswrapper[4861]: I1003 13:48:41.134570 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/ef2b767c-ef6a-4364-a3f0-14b68bee3986-ovn-controller-tls-certs\") pod \"ovn-controller-x5wkc\" (UID: \"ef2b767c-ef6a-4364-a3f0-14b68bee3986\") " pod="openstack/ovn-controller-x5wkc" Oct 03 13:48:41 crc kubenswrapper[4861]: I1003 13:48:41.134625 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ef2b767c-ef6a-4364-a3f0-14b68bee3986-scripts\") pod \"ovn-controller-x5wkc\" (UID: \"ef2b767c-ef6a-4364-a3f0-14b68bee3986\") " pod="openstack/ovn-controller-x5wkc" Oct 03 13:48:41 crc kubenswrapper[4861]: I1003 13:48:41.134655 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/72bbb704-bf32-46a9-9540-32c2e385f8ab-etc-ovs\") pod \"ovn-controller-ovs-j8c4m\" (UID: \"72bbb704-bf32-46a9-9540-32c2e385f8ab\") " pod="openstack/ovn-controller-ovs-j8c4m" Oct 03 13:48:41 crc kubenswrapper[4861]: I1003 13:48:41.134682 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/72bbb704-bf32-46a9-9540-32c2e385f8ab-scripts\") pod \"ovn-controller-ovs-j8c4m\" (UID: \"72bbb704-bf32-46a9-9540-32c2e385f8ab\") " pod="openstack/ovn-controller-ovs-j8c4m" Oct 03 13:48:41 crc kubenswrapper[4861]: I1003 13:48:41.134718 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z4r2r\" (UniqueName: \"kubernetes.io/projected/ef2b767c-ef6a-4364-a3f0-14b68bee3986-kube-api-access-z4r2r\") pod \"ovn-controller-x5wkc\" (UID: \"ef2b767c-ef6a-4364-a3f0-14b68bee3986\") " pod="openstack/ovn-controller-x5wkc" Oct 03 13:48:41 crc kubenswrapper[4861]: I1003 13:48:41.134739 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/72bbb704-bf32-46a9-9540-32c2e385f8ab-var-lib\") pod \"ovn-controller-ovs-j8c4m\" (UID: \"72bbb704-bf32-46a9-9540-32c2e385f8ab\") " pod="openstack/ovn-controller-ovs-j8c4m" Oct 03 13:48:41 crc kubenswrapper[4861]: I1003 13:48:41.134756 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wxzpm\" (UniqueName: \"kubernetes.io/projected/72bbb704-bf32-46a9-9540-32c2e385f8ab-kube-api-access-wxzpm\") pod \"ovn-controller-ovs-j8c4m\" (UID: \"72bbb704-bf32-46a9-9540-32c2e385f8ab\") " pod="openstack/ovn-controller-ovs-j8c4m" Oct 03 13:48:41 crc kubenswrapper[4861]: I1003 13:48:41.134786 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: 
\"kubernetes.io/host-path/ef2b767c-ef6a-4364-a3f0-14b68bee3986-var-run-ovn\") pod \"ovn-controller-x5wkc\" (UID: \"ef2b767c-ef6a-4364-a3f0-14b68bee3986\") " pod="openstack/ovn-controller-x5wkc" Oct 03 13:48:41 crc kubenswrapper[4861]: I1003 13:48:41.134801 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/ef2b767c-ef6a-4364-a3f0-14b68bee3986-var-log-ovn\") pod \"ovn-controller-x5wkc\" (UID: \"ef2b767c-ef6a-4364-a3f0-14b68bee3986\") " pod="openstack/ovn-controller-x5wkc" Oct 03 13:48:41 crc kubenswrapper[4861]: I1003 13:48:41.134816 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/72bbb704-bf32-46a9-9540-32c2e385f8ab-var-log\") pod \"ovn-controller-ovs-j8c4m\" (UID: \"72bbb704-bf32-46a9-9540-32c2e385f8ab\") " pod="openstack/ovn-controller-ovs-j8c4m" Oct 03 13:48:41 crc kubenswrapper[4861]: I1003 13:48:41.134855 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/ef2b767c-ef6a-4364-a3f0-14b68bee3986-var-run\") pod \"ovn-controller-x5wkc\" (UID: \"ef2b767c-ef6a-4364-a3f0-14b68bee3986\") " pod="openstack/ovn-controller-x5wkc" Oct 03 13:48:41 crc kubenswrapper[4861]: I1003 13:48:41.134899 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef2b767c-ef6a-4364-a3f0-14b68bee3986-combined-ca-bundle\") pod \"ovn-controller-x5wkc\" (UID: \"ef2b767c-ef6a-4364-a3f0-14b68bee3986\") " pod="openstack/ovn-controller-x5wkc" Oct 03 13:48:41 crc kubenswrapper[4861]: I1003 13:48:41.134919 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/72bbb704-bf32-46a9-9540-32c2e385f8ab-var-run\") pod \"ovn-controller-ovs-j8c4m\" (UID: \"72bbb704-bf32-46a9-9540-32c2e385f8ab\") " pod="openstack/ovn-controller-ovs-j8c4m" Oct 03 13:48:41 crc kubenswrapper[4861]: I1003 13:48:41.135873 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/ef2b767c-ef6a-4364-a3f0-14b68bee3986-var-log-ovn\") pod \"ovn-controller-x5wkc\" (UID: \"ef2b767c-ef6a-4364-a3f0-14b68bee3986\") " pod="openstack/ovn-controller-x5wkc" Oct 03 13:48:41 crc kubenswrapper[4861]: I1003 13:48:41.136244 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/ef2b767c-ef6a-4364-a3f0-14b68bee3986-var-run-ovn\") pod \"ovn-controller-x5wkc\" (UID: \"ef2b767c-ef6a-4364-a3f0-14b68bee3986\") " pod="openstack/ovn-controller-x5wkc" Oct 03 13:48:41 crc kubenswrapper[4861]: I1003 13:48:41.136433 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/ef2b767c-ef6a-4364-a3f0-14b68bee3986-var-run\") pod \"ovn-controller-x5wkc\" (UID: \"ef2b767c-ef6a-4364-a3f0-14b68bee3986\") " pod="openstack/ovn-controller-x5wkc" Oct 03 13:48:41 crc kubenswrapper[4861]: I1003 13:48:41.138325 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef2b767c-ef6a-4364-a3f0-14b68bee3986-combined-ca-bundle\") pod \"ovn-controller-x5wkc\" (UID: \"ef2b767c-ef6a-4364-a3f0-14b68bee3986\") " pod="openstack/ovn-controller-x5wkc" Oct 03 13:48:41 crc kubenswrapper[4861]: 
I1003 13:48:41.140782 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ef2b767c-ef6a-4364-a3f0-14b68bee3986-scripts\") pod \"ovn-controller-x5wkc\" (UID: \"ef2b767c-ef6a-4364-a3f0-14b68bee3986\") " pod="openstack/ovn-controller-x5wkc" Oct 03 13:48:41 crc kubenswrapper[4861]: I1003 13:48:41.142425 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/ef2b767c-ef6a-4364-a3f0-14b68bee3986-ovn-controller-tls-certs\") pod \"ovn-controller-x5wkc\" (UID: \"ef2b767c-ef6a-4364-a3f0-14b68bee3986\") " pod="openstack/ovn-controller-x5wkc" Oct 03 13:48:41 crc kubenswrapper[4861]: I1003 13:48:41.163385 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z4r2r\" (UniqueName: \"kubernetes.io/projected/ef2b767c-ef6a-4364-a3f0-14b68bee3986-kube-api-access-z4r2r\") pod \"ovn-controller-x5wkc\" (UID: \"ef2b767c-ef6a-4364-a3f0-14b68bee3986\") " pod="openstack/ovn-controller-x5wkc" Oct 03 13:48:41 crc kubenswrapper[4861]: I1003 13:48:41.236655 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/72bbb704-bf32-46a9-9540-32c2e385f8ab-etc-ovs\") pod \"ovn-controller-ovs-j8c4m\" (UID: \"72bbb704-bf32-46a9-9540-32c2e385f8ab\") " pod="openstack/ovn-controller-ovs-j8c4m" Oct 03 13:48:41 crc kubenswrapper[4861]: I1003 13:48:41.236703 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/72bbb704-bf32-46a9-9540-32c2e385f8ab-scripts\") pod \"ovn-controller-ovs-j8c4m\" (UID: \"72bbb704-bf32-46a9-9540-32c2e385f8ab\") " pod="openstack/ovn-controller-ovs-j8c4m" Oct 03 13:48:41 crc kubenswrapper[4861]: I1003 13:48:41.236734 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/72bbb704-bf32-46a9-9540-32c2e385f8ab-var-lib\") pod \"ovn-controller-ovs-j8c4m\" (UID: \"72bbb704-bf32-46a9-9540-32c2e385f8ab\") " pod="openstack/ovn-controller-ovs-j8c4m" Oct 03 13:48:41 crc kubenswrapper[4861]: I1003 13:48:41.236751 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wxzpm\" (UniqueName: \"kubernetes.io/projected/72bbb704-bf32-46a9-9540-32c2e385f8ab-kube-api-access-wxzpm\") pod \"ovn-controller-ovs-j8c4m\" (UID: \"72bbb704-bf32-46a9-9540-32c2e385f8ab\") " pod="openstack/ovn-controller-ovs-j8c4m" Oct 03 13:48:41 crc kubenswrapper[4861]: I1003 13:48:41.236777 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/72bbb704-bf32-46a9-9540-32c2e385f8ab-var-log\") pod \"ovn-controller-ovs-j8c4m\" (UID: \"72bbb704-bf32-46a9-9540-32c2e385f8ab\") " pod="openstack/ovn-controller-ovs-j8c4m" Oct 03 13:48:41 crc kubenswrapper[4861]: I1003 13:48:41.236848 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/72bbb704-bf32-46a9-9540-32c2e385f8ab-var-run\") pod \"ovn-controller-ovs-j8c4m\" (UID: \"72bbb704-bf32-46a9-9540-32c2e385f8ab\") " pod="openstack/ovn-controller-ovs-j8c4m" Oct 03 13:48:41 crc kubenswrapper[4861]: I1003 13:48:41.236936 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/72bbb704-bf32-46a9-9540-32c2e385f8ab-var-run\") pod 
\"ovn-controller-ovs-j8c4m\" (UID: \"72bbb704-bf32-46a9-9540-32c2e385f8ab\") " pod="openstack/ovn-controller-ovs-j8c4m" Oct 03 13:48:41 crc kubenswrapper[4861]: I1003 13:48:41.237103 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/72bbb704-bf32-46a9-9540-32c2e385f8ab-etc-ovs\") pod \"ovn-controller-ovs-j8c4m\" (UID: \"72bbb704-bf32-46a9-9540-32c2e385f8ab\") " pod="openstack/ovn-controller-ovs-j8c4m" Oct 03 13:48:41 crc kubenswrapper[4861]: I1003 13:48:41.238869 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/72bbb704-bf32-46a9-9540-32c2e385f8ab-scripts\") pod \"ovn-controller-ovs-j8c4m\" (UID: \"72bbb704-bf32-46a9-9540-32c2e385f8ab\") " pod="openstack/ovn-controller-ovs-j8c4m" Oct 03 13:48:41 crc kubenswrapper[4861]: I1003 13:48:41.238992 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/72bbb704-bf32-46a9-9540-32c2e385f8ab-var-lib\") pod \"ovn-controller-ovs-j8c4m\" (UID: \"72bbb704-bf32-46a9-9540-32c2e385f8ab\") " pod="openstack/ovn-controller-ovs-j8c4m" Oct 03 13:48:41 crc kubenswrapper[4861]: I1003 13:48:41.239355 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/72bbb704-bf32-46a9-9540-32c2e385f8ab-var-log\") pod \"ovn-controller-ovs-j8c4m\" (UID: \"72bbb704-bf32-46a9-9540-32c2e385f8ab\") " pod="openstack/ovn-controller-ovs-j8c4m" Oct 03 13:48:41 crc kubenswrapper[4861]: I1003 13:48:41.248941 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-x5wkc" Oct 03 13:48:41 crc kubenswrapper[4861]: I1003 13:48:41.255037 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wxzpm\" (UniqueName: \"kubernetes.io/projected/72bbb704-bf32-46a9-9540-32c2e385f8ab-kube-api-access-wxzpm\") pod \"ovn-controller-ovs-j8c4m\" (UID: \"72bbb704-bf32-46a9-9540-32c2e385f8ab\") " pod="openstack/ovn-controller-ovs-j8c4m" Oct 03 13:48:41 crc kubenswrapper[4861]: I1003 13:48:41.322651 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-j8c4m" Oct 03 13:48:44 crc kubenswrapper[4861]: I1003 13:48:44.966861 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"] Oct 03 13:48:44 crc kubenswrapper[4861]: I1003 13:48:44.968817 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-0" Oct 03 13:48:44 crc kubenswrapper[4861]: I1003 13:48:44.979788 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config" Oct 03 13:48:44 crc kubenswrapper[4861]: I1003 13:48:44.979826 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts" Oct 03 13:48:44 crc kubenswrapper[4861]: I1003 13:48:44.980585 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-sb-ovndbs" Oct 03 13:48:44 crc kubenswrapper[4861]: I1003 13:48:44.980732 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-qkhx2" Oct 03 13:48:44 crc kubenswrapper[4861]: I1003 13:48:44.981814 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Oct 03 13:48:45 crc kubenswrapper[4861]: I1003 13:48:45.095754 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/a361bfa1-97ef-4ee2-bcfe-3763898cbc32-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"a361bfa1-97ef-4ee2-bcfe-3763898cbc32\") " pod="openstack/ovsdbserver-sb-0" Oct 03 13:48:45 crc kubenswrapper[4861]: I1003 13:48:45.095802 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a361bfa1-97ef-4ee2-bcfe-3763898cbc32-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"a361bfa1-97ef-4ee2-bcfe-3763898cbc32\") " pod="openstack/ovsdbserver-sb-0" Oct 03 13:48:45 crc kubenswrapper[4861]: I1003 13:48:45.095835 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/a361bfa1-97ef-4ee2-bcfe-3763898cbc32-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"a361bfa1-97ef-4ee2-bcfe-3763898cbc32\") " pod="openstack/ovsdbserver-sb-0" Oct 03 13:48:45 crc kubenswrapper[4861]: I1003 13:48:45.095887 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a361bfa1-97ef-4ee2-bcfe-3763898cbc32-config\") pod \"ovsdbserver-sb-0\" (UID: \"a361bfa1-97ef-4ee2-bcfe-3763898cbc32\") " pod="openstack/ovsdbserver-sb-0" Oct 03 13:48:45 crc kubenswrapper[4861]: I1003 13:48:45.095906 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/a361bfa1-97ef-4ee2-bcfe-3763898cbc32-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"a361bfa1-97ef-4ee2-bcfe-3763898cbc32\") " pod="openstack/ovsdbserver-sb-0" Oct 03 13:48:45 crc kubenswrapper[4861]: I1003 13:48:45.096015 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bc458\" (UniqueName: \"kubernetes.io/projected/a361bfa1-97ef-4ee2-bcfe-3763898cbc32-kube-api-access-bc458\") pod \"ovsdbserver-sb-0\" (UID: \"a361bfa1-97ef-4ee2-bcfe-3763898cbc32\") " pod="openstack/ovsdbserver-sb-0" Oct 03 13:48:45 crc kubenswrapper[4861]: I1003 13:48:45.096048 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a361bfa1-97ef-4ee2-bcfe-3763898cbc32-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: 
\"a361bfa1-97ef-4ee2-bcfe-3763898cbc32\") " pod="openstack/ovsdbserver-sb-0" Oct 03 13:48:45 crc kubenswrapper[4861]: I1003 13:48:45.096082 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"ovsdbserver-sb-0\" (UID: \"a361bfa1-97ef-4ee2-bcfe-3763898cbc32\") " pod="openstack/ovsdbserver-sb-0" Oct 03 13:48:45 crc kubenswrapper[4861]: I1003 13:48:45.198008 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a361bfa1-97ef-4ee2-bcfe-3763898cbc32-config\") pod \"ovsdbserver-sb-0\" (UID: \"a361bfa1-97ef-4ee2-bcfe-3763898cbc32\") " pod="openstack/ovsdbserver-sb-0" Oct 03 13:48:45 crc kubenswrapper[4861]: I1003 13:48:45.198561 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/a361bfa1-97ef-4ee2-bcfe-3763898cbc32-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"a361bfa1-97ef-4ee2-bcfe-3763898cbc32\") " pod="openstack/ovsdbserver-sb-0" Oct 03 13:48:45 crc kubenswrapper[4861]: I1003 13:48:45.198672 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bc458\" (UniqueName: \"kubernetes.io/projected/a361bfa1-97ef-4ee2-bcfe-3763898cbc32-kube-api-access-bc458\") pod \"ovsdbserver-sb-0\" (UID: \"a361bfa1-97ef-4ee2-bcfe-3763898cbc32\") " pod="openstack/ovsdbserver-sb-0" Oct 03 13:48:45 crc kubenswrapper[4861]: I1003 13:48:45.198697 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a361bfa1-97ef-4ee2-bcfe-3763898cbc32-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"a361bfa1-97ef-4ee2-bcfe-3763898cbc32\") " pod="openstack/ovsdbserver-sb-0" Oct 03 13:48:45 crc kubenswrapper[4861]: I1003 13:48:45.198723 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"ovsdbserver-sb-0\" (UID: \"a361bfa1-97ef-4ee2-bcfe-3763898cbc32\") " pod="openstack/ovsdbserver-sb-0" Oct 03 13:48:45 crc kubenswrapper[4861]: I1003 13:48:45.198751 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/a361bfa1-97ef-4ee2-bcfe-3763898cbc32-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"a361bfa1-97ef-4ee2-bcfe-3763898cbc32\") " pod="openstack/ovsdbserver-sb-0" Oct 03 13:48:45 crc kubenswrapper[4861]: I1003 13:48:45.198769 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a361bfa1-97ef-4ee2-bcfe-3763898cbc32-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"a361bfa1-97ef-4ee2-bcfe-3763898cbc32\") " pod="openstack/ovsdbserver-sb-0" Oct 03 13:48:45 crc kubenswrapper[4861]: I1003 13:48:45.198792 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/a361bfa1-97ef-4ee2-bcfe-3763898cbc32-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"a361bfa1-97ef-4ee2-bcfe-3763898cbc32\") " pod="openstack/ovsdbserver-sb-0" Oct 03 13:48:45 crc kubenswrapper[4861]: I1003 13:48:45.198875 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/a361bfa1-97ef-4ee2-bcfe-3763898cbc32-config\") pod \"ovsdbserver-sb-0\" (UID: \"a361bfa1-97ef-4ee2-bcfe-3763898cbc32\") " pod="openstack/ovsdbserver-sb-0" Oct 03 13:48:45 crc kubenswrapper[4861]: I1003 13:48:45.199085 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/a361bfa1-97ef-4ee2-bcfe-3763898cbc32-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"a361bfa1-97ef-4ee2-bcfe-3763898cbc32\") " pod="openstack/ovsdbserver-sb-0" Oct 03 13:48:45 crc kubenswrapper[4861]: I1003 13:48:45.199168 4861 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"ovsdbserver-sb-0\" (UID: \"a361bfa1-97ef-4ee2-bcfe-3763898cbc32\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/ovsdbserver-sb-0" Oct 03 13:48:45 crc kubenswrapper[4861]: I1003 13:48:45.200267 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a361bfa1-97ef-4ee2-bcfe-3763898cbc32-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"a361bfa1-97ef-4ee2-bcfe-3763898cbc32\") " pod="openstack/ovsdbserver-sb-0" Oct 03 13:48:45 crc kubenswrapper[4861]: I1003 13:48:45.211342 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/a361bfa1-97ef-4ee2-bcfe-3763898cbc32-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"a361bfa1-97ef-4ee2-bcfe-3763898cbc32\") " pod="openstack/ovsdbserver-sb-0" Oct 03 13:48:45 crc kubenswrapper[4861]: I1003 13:48:45.211584 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/a361bfa1-97ef-4ee2-bcfe-3763898cbc32-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"a361bfa1-97ef-4ee2-bcfe-3763898cbc32\") " pod="openstack/ovsdbserver-sb-0" Oct 03 13:48:45 crc kubenswrapper[4861]: I1003 13:48:45.212191 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a361bfa1-97ef-4ee2-bcfe-3763898cbc32-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"a361bfa1-97ef-4ee2-bcfe-3763898cbc32\") " pod="openstack/ovsdbserver-sb-0" Oct 03 13:48:45 crc kubenswrapper[4861]: I1003 13:48:45.225534 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bc458\" (UniqueName: \"kubernetes.io/projected/a361bfa1-97ef-4ee2-bcfe-3763898cbc32-kube-api-access-bc458\") pod \"ovsdbserver-sb-0\" (UID: \"a361bfa1-97ef-4ee2-bcfe-3763898cbc32\") " pod="openstack/ovsdbserver-sb-0" Oct 03 13:48:45 crc kubenswrapper[4861]: I1003 13:48:45.226944 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"ovsdbserver-sb-0\" (UID: \"a361bfa1-97ef-4ee2-bcfe-3763898cbc32\") " pod="openstack/ovsdbserver-sb-0" Oct 03 13:48:45 crc kubenswrapper[4861]: I1003 13:48:45.298404 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-0" Oct 03 13:49:01 crc kubenswrapper[4861]: E1003 13:49:01.391342 4861 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Oct 03 13:49:01 crc kubenswrapper[4861]: E1003 13:49:01.391816 4861 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nffh5bdhf4h5f8h79h55h77h58fh56dh7bh6fh578hbch55dh68h56bhd9h65dh57ch658hc9h566h666h688h58h65dh684h5d7h6ch575h5d6h88q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-2xvrz,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-675f4bcbfc-ckdrf_openstack(bac78dbe-eb86-4702-8fbf-2a9e037ab76f): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 03 13:49:01 crc kubenswrapper[4861]: E1003 13:49:01.394360 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-675f4bcbfc-ckdrf" podUID="bac78dbe-eb86-4702-8fbf-2a9e037ab76f" Oct 03 13:49:01 crc kubenswrapper[4861]: E1003 13:49:01.404456 4861 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Oct 03 13:49:01 crc kubenswrapper[4861]: E1003 13:49:01.404620 4861 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d 
--hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nfdh5dfhb6h64h676hc4h78h97h669h54chfbh696hb5h54bh5d4h6bh64h644h677h584h5cbh698h9dh5bbh5f8h5b8hcdh644h5c7h694hbfh589q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-2npqg,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-5ccc8479f9-dzkq2_openstack(bc8b9936-3237-4bdf-99b5-94c8154217dc): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 03 13:49:01 crc kubenswrapper[4861]: E1003 13:49:01.409369 4861 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Oct 03 13:49:01 crc kubenswrapper[4861]: E1003 13:49:01.409525 4861 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n659h4h664hbh658h587h67ch89h587h8fh679hc6hf9h55fh644h5d5h698h68dh5cdh5ffh669h54ch9h689hb8hd4h5bfhd8h5d7h5fh665h574q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-wgzjh,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-57d769cc4f-2ntbd_openstack(23e02e1d-0ff1-4ce2-a299-5802ec3177cd): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 03 13:49:01 crc kubenswrapper[4861]: E1003 13:49:01.411292 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-57d769cc4f-2ntbd" podUID="23e02e1d-0ff1-4ce2-a299-5802ec3177cd" Oct 03 13:49:01 crc kubenswrapper[4861]: E1003 13:49:01.415311 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-5ccc8479f9-dzkq2" podUID="bc8b9936-3237-4bdf-99b5-94c8154217dc" Oct 03 13:49:01 crc kubenswrapper[4861]: E1003 13:49:01.557444 4861 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Oct 03 13:49:01 crc kubenswrapper[4861]: E1003 13:49:01.560415 4861 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:ndfhb5h667h568h584h5f9h58dh565h664h587h597h577h64bh5c4h66fh647hbdh68ch5c5h68dh686h5f7h64hd7hc6h55fh57bh98h57fh87h5fh57fq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-c5gv2,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-78dd6ddcc-8lgmr_openstack(131ad935-aaa5-4684-a11f-8591d23823d6): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 03 13:49:01 crc kubenswrapper[4861]: E1003 13:49:01.561979 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-78dd6ddcc-8lgmr" podUID="131ad935-aaa5-4684-a11f-8591d23823d6" Oct 03 13:49:01 crc kubenswrapper[4861]: I1003 13:49:01.943541 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-x5wkc"] Oct 03 13:49:02 crc kubenswrapper[4861]: I1003 13:49:02.011517 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 03 13:49:02 crc kubenswrapper[4861]: I1003 13:49:02.018000 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Oct 03 13:49:02 crc kubenswrapper[4861]: I1003 13:49:02.026449 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Oct 03 13:49:02 crc kubenswrapper[4861]: I1003 13:49:02.036166 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Oct 03 13:49:02 crc kubenswrapper[4861]: I1003 13:49:02.072709 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Oct 03 13:49:02 crc kubenswrapper[4861]: W1003 13:49:02.110513 4861 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6714f489_ceb2_4b99_a61d_fe45289bed5f.slice/crio-3b17906fb076623faef6070c7ac69734b81c0161a47888876a15b6f43653bad4 WatchSource:0}: Error finding container 
3b17906fb076623faef6070c7ac69734b81c0161a47888876a15b6f43653bad4: Status 404 returned error can't find the container with id 3b17906fb076623faef6070c7ac69734b81c0161a47888876a15b6f43653bad4 Oct 03 13:49:02 crc kubenswrapper[4861]: W1003 13:49:02.116021 4861 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode729e54c_b7bc_46e8_94c8_ef5d8a4f42b1.slice/crio-6d16b2a97f67a85dcba06c896005ea553966a6e2902ce953c4b1ade18e7999c7 WatchSource:0}: Error finding container 6d16b2a97f67a85dcba06c896005ea553966a6e2902ce953c4b1ade18e7999c7: Status 404 returned error can't find the container with id 6d16b2a97f67a85dcba06c896005ea553966a6e2902ce953c4b1ade18e7999c7 Oct 03 13:49:02 crc kubenswrapper[4861]: I1003 13:49:02.198649 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-j8c4m"] Oct 03 13:49:02 crc kubenswrapper[4861]: I1003 13:49:02.357614 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"7464ed9c-8f08-4c7a-8ca3-8a57734bd31f","Type":"ContainerStarted","Data":"3aca2f988a506ad1a7499fa4d5b77977c07f3d2dfa3992496eec91762cdf68e0"} Oct 03 13:49:02 crc kubenswrapper[4861]: I1003 13:49:02.358600 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-x5wkc" event={"ID":"ef2b767c-ef6a-4364-a3f0-14b68bee3986","Type":"ContainerStarted","Data":"1bd86c5368e631635cf7e0e9868fe31143c8f92f3768fb3eee7650449bf2a286"} Oct 03 13:49:02 crc kubenswrapper[4861]: I1003 13:49:02.361156 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"e729e54c-b7bc-46e8-94c8-ef5d8a4f42b1","Type":"ContainerStarted","Data":"6d16b2a97f67a85dcba06c896005ea553966a6e2902ce953c4b1ade18e7999c7"} Oct 03 13:49:02 crc kubenswrapper[4861]: I1003 13:49:02.362949 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"f6bf9658-85f4-4e0b-9aa6-cf672e04a858","Type":"ContainerStarted","Data":"371b5cb918786219d9674932e023378bc86ce57020828b9077c3c8b00282851a"} Oct 03 13:49:02 crc kubenswrapper[4861]: I1003 13:49:02.363783 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"6714f489-ceb2-4b99-a61d-fe45289bed5f","Type":"ContainerStarted","Data":"3b17906fb076623faef6070c7ac69734b81c0161a47888876a15b6f43653bad4"} Oct 03 13:49:02 crc kubenswrapper[4861]: I1003 13:49:02.365698 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"cc0949fe-630b-4f65-9c6c-7a87272586a2","Type":"ContainerStarted","Data":"0f2dcbd77441d635a08946c3ee327bbf6042326a786f01694bd8c889c781b0dc"} Oct 03 13:49:02 crc kubenswrapper[4861]: E1003 13:49:02.367030 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified\\\"\"" pod="openstack/dnsmasq-dns-57d769cc4f-2ntbd" podUID="23e02e1d-0ff1-4ce2-a299-5802ec3177cd" Oct 03 13:49:02 crc kubenswrapper[4861]: E1003 13:49:02.367199 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified\\\"\"" pod="openstack/dnsmasq-dns-5ccc8479f9-dzkq2" podUID="bc8b9936-3237-4bdf-99b5-94c8154217dc" Oct 03 13:49:02 crc kubenswrapper[4861]: I1003 
13:49:02.530745 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Oct 03 13:49:02 crc kubenswrapper[4861]: I1003 13:49:02.719454 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-ckdrf" Oct 03 13:49:02 crc kubenswrapper[4861]: I1003 13:49:02.829892 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2xvrz\" (UniqueName: \"kubernetes.io/projected/bac78dbe-eb86-4702-8fbf-2a9e037ab76f-kube-api-access-2xvrz\") pod \"bac78dbe-eb86-4702-8fbf-2a9e037ab76f\" (UID: \"bac78dbe-eb86-4702-8fbf-2a9e037ab76f\") " Oct 03 13:49:02 crc kubenswrapper[4861]: I1003 13:49:02.830259 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bac78dbe-eb86-4702-8fbf-2a9e037ab76f-config\") pod \"bac78dbe-eb86-4702-8fbf-2a9e037ab76f\" (UID: \"bac78dbe-eb86-4702-8fbf-2a9e037ab76f\") " Oct 03 13:49:02 crc kubenswrapper[4861]: I1003 13:49:02.832447 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bac78dbe-eb86-4702-8fbf-2a9e037ab76f-config" (OuterVolumeSpecName: "config") pod "bac78dbe-eb86-4702-8fbf-2a9e037ab76f" (UID: "bac78dbe-eb86-4702-8fbf-2a9e037ab76f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:49:02 crc kubenswrapper[4861]: I1003 13:49:02.843481 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bac78dbe-eb86-4702-8fbf-2a9e037ab76f-kube-api-access-2xvrz" (OuterVolumeSpecName: "kube-api-access-2xvrz") pod "bac78dbe-eb86-4702-8fbf-2a9e037ab76f" (UID: "bac78dbe-eb86-4702-8fbf-2a9e037ab76f"). InnerVolumeSpecName "kube-api-access-2xvrz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:49:02 crc kubenswrapper[4861]: I1003 13:49:02.873999 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-8lgmr" Oct 03 13:49:02 crc kubenswrapper[4861]: I1003 13:49:02.937879 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/131ad935-aaa5-4684-a11f-8591d23823d6-dns-svc\") pod \"131ad935-aaa5-4684-a11f-8591d23823d6\" (UID: \"131ad935-aaa5-4684-a11f-8591d23823d6\") " Oct 03 13:49:02 crc kubenswrapper[4861]: I1003 13:49:02.937984 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/131ad935-aaa5-4684-a11f-8591d23823d6-config\") pod \"131ad935-aaa5-4684-a11f-8591d23823d6\" (UID: \"131ad935-aaa5-4684-a11f-8591d23823d6\") " Oct 03 13:49:02 crc kubenswrapper[4861]: I1003 13:49:02.938267 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c5gv2\" (UniqueName: \"kubernetes.io/projected/131ad935-aaa5-4684-a11f-8591d23823d6-kube-api-access-c5gv2\") pod \"131ad935-aaa5-4684-a11f-8591d23823d6\" (UID: \"131ad935-aaa5-4684-a11f-8591d23823d6\") " Oct 03 13:49:02 crc kubenswrapper[4861]: I1003 13:49:02.938734 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2xvrz\" (UniqueName: \"kubernetes.io/projected/bac78dbe-eb86-4702-8fbf-2a9e037ab76f-kube-api-access-2xvrz\") on node \"crc\" DevicePath \"\"" Oct 03 13:49:02 crc kubenswrapper[4861]: I1003 13:49:02.938753 4861 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bac78dbe-eb86-4702-8fbf-2a9e037ab76f-config\") on node \"crc\" DevicePath \"\"" Oct 03 13:49:02 crc kubenswrapper[4861]: I1003 13:49:02.939383 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/131ad935-aaa5-4684-a11f-8591d23823d6-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "131ad935-aaa5-4684-a11f-8591d23823d6" (UID: "131ad935-aaa5-4684-a11f-8591d23823d6"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:49:02 crc kubenswrapper[4861]: I1003 13:49:02.939483 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/131ad935-aaa5-4684-a11f-8591d23823d6-config" (OuterVolumeSpecName: "config") pod "131ad935-aaa5-4684-a11f-8591d23823d6" (UID: "131ad935-aaa5-4684-a11f-8591d23823d6"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:49:02 crc kubenswrapper[4861]: I1003 13:49:02.946031 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/131ad935-aaa5-4684-a11f-8591d23823d6-kube-api-access-c5gv2" (OuterVolumeSpecName: "kube-api-access-c5gv2") pod "131ad935-aaa5-4684-a11f-8591d23823d6" (UID: "131ad935-aaa5-4684-a11f-8591d23823d6"). InnerVolumeSpecName "kube-api-access-c5gv2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:49:03 crc kubenswrapper[4861]: I1003 13:49:03.040330 4861 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/131ad935-aaa5-4684-a11f-8591d23823d6-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 03 13:49:03 crc kubenswrapper[4861]: I1003 13:49:03.040384 4861 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/131ad935-aaa5-4684-a11f-8591d23823d6-config\") on node \"crc\" DevicePath \"\"" Oct 03 13:49:03 crc kubenswrapper[4861]: I1003 13:49:03.040397 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c5gv2\" (UniqueName: \"kubernetes.io/projected/131ad935-aaa5-4684-a11f-8591d23823d6-kube-api-access-c5gv2\") on node \"crc\" DevicePath \"\"" Oct 03 13:49:03 crc kubenswrapper[4861]: I1003 13:49:03.375531 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"b9bf9ee1-8038-4578-b10d-390a82c11290","Type":"ContainerStarted","Data":"9f9d39c8bf5f4551c44ca852498249659ad6b84b7d2f70a3afc57c8ccc19b665"} Oct 03 13:49:03 crc kubenswrapper[4861]: I1003 13:49:03.379503 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-ckdrf" Oct 03 13:49:03 crc kubenswrapper[4861]: I1003 13:49:03.379515 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-ckdrf" event={"ID":"bac78dbe-eb86-4702-8fbf-2a9e037ab76f","Type":"ContainerDied","Data":"7002f3f719ce272b7cae8b3d4ab115d740273f343340a68a4e1e10bb28b32632"} Oct 03 13:49:03 crc kubenswrapper[4861]: I1003 13:49:03.383764 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"d537dadb-d98c-4ac3-ae54-fc0a9397d7d7","Type":"ContainerStarted","Data":"a457e9d6f265baf52dcad5b846aa53b26ec38f6a5d91ffdde9329266fe7937eb"} Oct 03 13:49:03 crc kubenswrapper[4861]: I1003 13:49:03.385911 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"a361bfa1-97ef-4ee2-bcfe-3763898cbc32","Type":"ContainerStarted","Data":"ab0ab86a55b0a62e0bfdc1a95c620071dad01a061f748da03bb87684913c17df"} Oct 03 13:49:03 crc kubenswrapper[4861]: I1003 13:49:03.387356 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-j8c4m" event={"ID":"72bbb704-bf32-46a9-9540-32c2e385f8ab","Type":"ContainerStarted","Data":"b3d5ff36f732dab1ea4dec742183b04d39721fdfc57239d88503ba10d5e20c4d"} Oct 03 13:49:03 crc kubenswrapper[4861]: I1003 13:49:03.388179 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-8lgmr" event={"ID":"131ad935-aaa5-4684-a11f-8591d23823d6","Type":"ContainerDied","Data":"8b756b017a8474c3b038cafe80845a8bae60511a915aa7e26b75fed99b9420d4"} Oct 03 13:49:03 crc kubenswrapper[4861]: I1003 13:49:03.388225 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-8lgmr" Oct 03 13:49:03 crc kubenswrapper[4861]: I1003 13:49:03.494903 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-8lgmr"] Oct 03 13:49:03 crc kubenswrapper[4861]: I1003 13:49:03.498803 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-8lgmr"] Oct 03 13:49:03 crc kubenswrapper[4861]: I1003 13:49:03.514771 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-ckdrf"] Oct 03 13:49:03 crc kubenswrapper[4861]: I1003 13:49:03.521278 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-ckdrf"] Oct 03 13:49:04 crc kubenswrapper[4861]: I1003 13:49:04.694794 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="131ad935-aaa5-4684-a11f-8591d23823d6" path="/var/lib/kubelet/pods/131ad935-aaa5-4684-a11f-8591d23823d6/volumes" Oct 03 13:49:04 crc kubenswrapper[4861]: I1003 13:49:04.695610 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bac78dbe-eb86-4702-8fbf-2a9e037ab76f" path="/var/lib/kubelet/pods/bac78dbe-eb86-4702-8fbf-2a9e037ab76f/volumes" Oct 03 13:49:14 crc kubenswrapper[4861]: E1003 13:49:14.393381 4861 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified" Oct 03 13:49:14 crc kubenswrapper[4861]: E1003 13:49:14.394103 4861 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:ovn-controller,Image:quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified,Command:[ovn-controller --pidfile unix:/run/openvswitch/db.sock --certificate=/etc/pki/tls/certs/ovndb.crt --private-key=/etc/pki/tls/private/ovndb.key 
--ca-cert=/etc/pki/tls/certs/ovndbca.crt],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n59fh5bh595h5f7h84h688h5fdh664h647h57dh645h686h646h647h555h5d7h89h579hcfh6dh64h9ch56bh89h679h659h9fh676h5chf8h675h667q,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:var-run,ReadOnly:false,MountPath:/var/run/openvswitch,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:var-run-ovn,ReadOnly:false,MountPath:/var/run/ovn,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:var-log-ovn,ReadOnly:false,MountPath:/var/log/ovn,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovn-controller-tls-certs,ReadOnly:true,MountPath:/etc/pki/tls/certs/ovndb.crt,SubPath:tls.crt,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovn-controller-tls-certs,ReadOnly:true,MountPath:/etc/pki/tls/private/ovndb.key,SubPath:tls.key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovn-controller-tls-certs,ReadOnly:true,MountPath:/etc/pki/tls/certs/ovndbca.crt,SubPath:ca.crt,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-z4r2r,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/local/bin/container-scripts/ovn_controller_liveness.sh],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:30,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/local/bin/container-scripts/ovn_controller_readiness.sh],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:30,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:&Lifecycle{PostStart:nil,PreStop:&LifecycleHandler{Exec:&ExecAction{Command:[/usr/share/ovn/scripts/ovn-ctl stop_controller],},HTTPGet:nil,TCPSocket:nil,Sleep:nil,},},TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[NET_ADMIN SYS_ADMIN SYS_NICE],Drop:[],},Privileged:*true,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovn-controller-x5wkc_openstack(ef2b767c-ef6a-4364-a3f0-14b68bee3986): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 03 13:49:14 crc kubenswrapper[4861]: E1003 13:49:14.395368 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed 
to \"StartContainer\" for \"ovn-controller\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/ovn-controller-x5wkc" podUID="ef2b767c-ef6a-4364-a3f0-14b68bee3986" Oct 03 13:49:14 crc kubenswrapper[4861]: E1003 13:49:14.480440 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovn-controller\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified\\\"\"" pod="openstack/ovn-controller-x5wkc" podUID="ef2b767c-ef6a-4364-a3f0-14b68bee3986" Oct 03 13:49:14 crc kubenswrapper[4861]: E1003 13:49:14.772222 4861 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-ovn-sb-db-server:current-podified" Oct 03 13:49:14 crc kubenswrapper[4861]: E1003 13:49:14.772466 4861 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:ovsdbserver-sb,Image:quay.io/podified-antelope-centos9/openstack-ovn-sb-db-server:current-podified,Command:[/usr/bin/dumb-init],Args:[/usr/local/bin/container-scripts/setup.sh],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nddh689h8hcfh68bh664h698h56h8dhfh695hd7h684hf8h5ddh55fh646h577hf6h5d4h5cfhbch7dh56h55fh64ch649h676h5b7h5d9hb6h597q,ValueFrom:nil,},EnvVar{Name:OVN_LOGDIR,Value:/tmp,ValueFrom:nil,},EnvVar{Name:OVN_RUNDIR,Value:/tmp,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovndbcluster-sb-etc-ovn,ReadOnly:false,MountPath:/etc/ovn,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovsdb-rundir,ReadOnly:false,MountPath:/tmp,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovsdbserver-sb-tls-certs,ReadOnly:true,MountPath:/etc/pki/tls/certs/ovndb.crt,SubPath:tls.crt,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovsdbserver-sb-tls-certs,ReadOnly:true,MountPath:/etc/pki/tls/private/ovndb.key,SubPath:tls.key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovsdbserver-sb-tls-certs,ReadOnly:true,MountPath:/etc/pki/tls/certs/ovndbca.crt,SubPath:ca.crt,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-bc458,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/bin/pidof ovsdb-server],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:3,TimeoutSeconds:5,PeriodSeconds:3,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/bin/pidof 
ovsdb-server],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:&Lifecycle{PostStart:nil,PreStop:&LifecycleHandler{Exec:&ExecAction{Command:[/usr/local/bin/container-scripts/cleanup.sh],},HTTPGet:nil,TCPSocket:nil,Sleep:nil,},},TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/bin/pidof ovsdb-server],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:3,TimeoutSeconds:5,PeriodSeconds:3,SuccessThreshold:1,FailureThreshold:20,TerminationGracePeriodSeconds:nil,},ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovsdbserver-sb-0_openstack(a361bfa1-97ef-4ee2-bcfe-3763898cbc32): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 03 13:49:18 crc kubenswrapper[4861]: E1003 13:49:18.607245 4861 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.k8s.io/kube-state-metrics/kube-state-metrics:v2.15.0" Oct 03 13:49:18 crc kubenswrapper[4861]: E1003 13:49:18.607770 4861 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.k8s.io/kube-state-metrics/kube-state-metrics:v2.15.0" Oct 03 13:49:18 crc kubenswrapper[4861]: E1003 13:49:18.607921 4861 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-state-metrics,Image:registry.k8s.io/kube-state-metrics/kube-state-metrics:v2.15.0,Command:[],Args:[--resources=pods --namespaces=openstack],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:http-metrics,HostPort:0,ContainerPort:8080,Protocol:TCP,HostIP:,},ContainerPort{Name:telemetry,HostPort:0,ContainerPort:8081,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-2twt6,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/livez,Port:{0 8080 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:*true,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod kube-state-metrics-0_openstack(f6bf9658-85f4-4e0b-9aa6-cf672e04a858): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 03 13:49:18 crc kubenswrapper[4861]: E1003 13:49:18.610452 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-state-metrics\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openstack/kube-state-metrics-0" podUID="f6bf9658-85f4-4e0b-9aa6-cf672e04a858" Oct 03 13:49:19 crc kubenswrapper[4861]: I1003 13:49:19.515374 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"7464ed9c-8f08-4c7a-8ca3-8a57734bd31f","Type":"ContainerStarted","Data":"c512b663f93107f86c986c8efb04e45e54c124224378c18cd718246aa44a25cd"} Oct 03 13:49:19 crc kubenswrapper[4861]: I1003 13:49:19.515982 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0" Oct 03 13:49:19 crc kubenswrapper[4861]: I1003 13:49:19.517298 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"e729e54c-b7bc-46e8-94c8-ef5d8a4f42b1","Type":"ContainerStarted","Data":"0ec7640c6af20c2caf04de5beef0ac46e8b6494aa7a11b4b706ff2fc41f4a225"} Oct 03 13:49:19 crc kubenswrapper[4861]: I1003 13:49:19.518917 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"6714f489-ceb2-4b99-a61d-fe45289bed5f","Type":"ContainerStarted","Data":"ca3a44eb0e6aa68942df9410098ac666816385a10f2b3cbd052a50f2fce1a381"} Oct 03 13:49:19 crc kubenswrapper[4861]: I1003 13:49:19.521392 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-j8c4m" event={"ID":"72bbb704-bf32-46a9-9540-32c2e385f8ab","Type":"ContainerStarted","Data":"232818ccc61e9571a035e96d9edc68ec44130d890d419e7baab4688254c9e4c8"} Oct 03 13:49:19 crc kubenswrapper[4861]: I1003 13:49:19.522824 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"cc0949fe-630b-4f65-9c6c-7a87272586a2","Type":"ContainerStarted","Data":"dd7846e5bce6ad3fb45b70be69da40b1fd5b6a02c477d51bbb2694afb8ec6b2b"} Oct 03 13:49:19 crc kubenswrapper[4861]: I1003 13:49:19.524599 4861 generic.go:334] "Generic (PLEG): container finished" podID="23e02e1d-0ff1-4ce2-a299-5802ec3177cd" containerID="47ae663a3936d48c3b110af4e2b830fdcb6c1fadd37d9fba3a95c3b0015f2024" exitCode=0 Oct 03 13:49:19 crc kubenswrapper[4861]: I1003 13:49:19.524675 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/dnsmasq-dns-57d769cc4f-2ntbd" event={"ID":"23e02e1d-0ff1-4ce2-a299-5802ec3177cd","Type":"ContainerDied","Data":"47ae663a3936d48c3b110af4e2b830fdcb6c1fadd37d9fba3a95c3b0015f2024"} Oct 03 13:49:19 crc kubenswrapper[4861]: E1003 13:49:19.526418 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-state-metrics\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.k8s.io/kube-state-metrics/kube-state-metrics:v2.15.0\\\"\"" pod="openstack/kube-state-metrics-0" podUID="f6bf9658-85f4-4e0b-9aa6-cf672e04a858" Oct 03 13:49:19 crc kubenswrapper[4861]: I1003 13:49:19.537985 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=30.945597577 podStartE2EDuration="44.537970945s" podCreationTimestamp="2025-10-03 13:48:35 +0000 UTC" firstStartedPulling="2025-10-03 13:49:02.09964528 +0000 UTC m=+1056.097630327" lastFinishedPulling="2025-10-03 13:49:15.692018648 +0000 UTC m=+1069.690003695" observedRunningTime="2025-10-03 13:49:19.53325143 +0000 UTC m=+1073.531236487" watchObservedRunningTime="2025-10-03 13:49:19.537970945 +0000 UTC m=+1073.535955992" Oct 03 13:49:20 crc kubenswrapper[4861]: I1003 13:49:20.555075 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-2ntbd" event={"ID":"23e02e1d-0ff1-4ce2-a299-5802ec3177cd","Type":"ContainerStarted","Data":"8913d8a60dcbfee14531956bc0a72e73205cb6a335e477869e28138bc4b72fc2"} Oct 03 13:49:20 crc kubenswrapper[4861]: I1003 13:49:20.557564 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-57d769cc4f-2ntbd" Oct 03 13:49:20 crc kubenswrapper[4861]: I1003 13:49:20.579744 4861 generic.go:334] "Generic (PLEG): container finished" podID="bc8b9936-3237-4bdf-99b5-94c8154217dc" containerID="b894bf9efa10285ea00ee8c88b7a28dca014321507ff67f6e42ecf74c1ef1b81" exitCode=0 Oct 03 13:49:20 crc kubenswrapper[4861]: I1003 13:49:20.580551 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5ccc8479f9-dzkq2" event={"ID":"bc8b9936-3237-4bdf-99b5-94c8154217dc","Type":"ContainerDied","Data":"b894bf9efa10285ea00ee8c88b7a28dca014321507ff67f6e42ecf74c1ef1b81"} Oct 03 13:49:20 crc kubenswrapper[4861]: I1003 13:49:20.587219 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-57d769cc4f-2ntbd" podStartSLOduration=3.472281649 podStartE2EDuration="49.587198721s" podCreationTimestamp="2025-10-03 13:48:31 +0000 UTC" firstStartedPulling="2025-10-03 13:48:32.292814205 +0000 UTC m=+1026.290799252" lastFinishedPulling="2025-10-03 13:49:18.407731277 +0000 UTC m=+1072.405716324" observedRunningTime="2025-10-03 13:49:20.580433183 +0000 UTC m=+1074.578418230" watchObservedRunningTime="2025-10-03 13:49:20.587198721 +0000 UTC m=+1074.585183768" Oct 03 13:49:20 crc kubenswrapper[4861]: I1003 13:49:20.588585 4861 generic.go:334] "Generic (PLEG): container finished" podID="72bbb704-bf32-46a9-9540-32c2e385f8ab" containerID="232818ccc61e9571a035e96d9edc68ec44130d890d419e7baab4688254c9e4c8" exitCode=0 Oct 03 13:49:20 crc kubenswrapper[4861]: I1003 13:49:20.590021 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-j8c4m" event={"ID":"72bbb704-bf32-46a9-9540-32c2e385f8ab","Type":"ContainerDied","Data":"232818ccc61e9571a035e96d9edc68ec44130d890d419e7baab4688254c9e4c8"} Oct 03 13:49:21 crc kubenswrapper[4861]: I1003 13:49:21.607275 4861 kubelet.go:2453] "SyncLoop (PLEG): event for 
pod" pod="openstack/dnsmasq-dns-5ccc8479f9-dzkq2" event={"ID":"bc8b9936-3237-4bdf-99b5-94c8154217dc","Type":"ContainerStarted","Data":"6168d22a1bc581a267c56d4aa3ba9ad6df8f56547069fcefc064e9422ee9e829"} Oct 03 13:49:21 crc kubenswrapper[4861]: I1003 13:49:21.608363 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5ccc8479f9-dzkq2" Oct 03 13:49:21 crc kubenswrapper[4861]: I1003 13:49:21.611673 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-j8c4m" event={"ID":"72bbb704-bf32-46a9-9540-32c2e385f8ab","Type":"ContainerStarted","Data":"d97edc85a6c3193f101e7501a6ae1fb63b7fca70b8f6277b8cd70aaa2c30d007"} Oct 03 13:49:21 crc kubenswrapper[4861]: I1003 13:49:21.611756 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-j8c4m" event={"ID":"72bbb704-bf32-46a9-9540-32c2e385f8ab","Type":"ContainerStarted","Data":"81fa8046804d52544f16e7e3b17e20bfa291fe668a4f770ca3e34958a5339910"} Oct 03 13:49:21 crc kubenswrapper[4861]: I1003 13:49:21.612202 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-j8c4m" Oct 03 13:49:21 crc kubenswrapper[4861]: I1003 13:49:21.612337 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-j8c4m" Oct 03 13:49:21 crc kubenswrapper[4861]: I1003 13:49:21.630288 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5ccc8479f9-dzkq2" podStartSLOduration=3.878505646 podStartE2EDuration="51.630259916s" podCreationTimestamp="2025-10-03 13:48:30 +0000 UTC" firstStartedPulling="2025-10-03 13:48:32.107400682 +0000 UTC m=+1026.105385729" lastFinishedPulling="2025-10-03 13:49:19.859154942 +0000 UTC m=+1073.857139999" observedRunningTime="2025-10-03 13:49:21.625140841 +0000 UTC m=+1075.623125908" watchObservedRunningTime="2025-10-03 13:49:21.630259916 +0000 UTC m=+1075.628244963" Oct 03 13:49:21 crc kubenswrapper[4861]: I1003 13:49:21.650809 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ovs-j8c4m" podStartSLOduration=27.907399697 podStartE2EDuration="41.650791687s" podCreationTimestamp="2025-10-03 13:48:40 +0000 UTC" firstStartedPulling="2025-10-03 13:49:02.399004514 +0000 UTC m=+1056.396989561" lastFinishedPulling="2025-10-03 13:49:16.142396504 +0000 UTC m=+1070.140381551" observedRunningTime="2025-10-03 13:49:21.648512726 +0000 UTC m=+1075.646497803" watchObservedRunningTime="2025-10-03 13:49:21.650791687 +0000 UTC m=+1075.648776734" Oct 03 13:49:25 crc kubenswrapper[4861]: I1003 13:49:25.950816 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0" Oct 03 13:49:26 crc kubenswrapper[4861]: I1003 13:49:26.417349 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5ccc8479f9-dzkq2" Oct 03 13:49:26 crc kubenswrapper[4861]: I1003 13:49:26.639470 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-57d769cc4f-2ntbd" Oct 03 13:49:26 crc kubenswrapper[4861]: I1003 13:49:26.646095 4861 generic.go:334] "Generic (PLEG): container finished" podID="cc0949fe-630b-4f65-9c6c-7a87272586a2" containerID="dd7846e5bce6ad3fb45b70be69da40b1fd5b6a02c477d51bbb2694afb8ec6b2b" exitCode=0 Oct 03 13:49:26 crc kubenswrapper[4861]: I1003 13:49:26.646170 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" 
event={"ID":"cc0949fe-630b-4f65-9c6c-7a87272586a2","Type":"ContainerDied","Data":"dd7846e5bce6ad3fb45b70be69da40b1fd5b6a02c477d51bbb2694afb8ec6b2b"} Oct 03 13:49:26 crc kubenswrapper[4861]: I1003 13:49:26.649527 4861 generic.go:334] "Generic (PLEG): container finished" podID="e729e54c-b7bc-46e8-94c8-ef5d8a4f42b1" containerID="0ec7640c6af20c2caf04de5beef0ac46e8b6494aa7a11b4b706ff2fc41f4a225" exitCode=0 Oct 03 13:49:26 crc kubenswrapper[4861]: I1003 13:49:26.649561 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"e729e54c-b7bc-46e8-94c8-ef5d8a4f42b1","Type":"ContainerDied","Data":"0ec7640c6af20c2caf04de5beef0ac46e8b6494aa7a11b4b706ff2fc41f4a225"} Oct 03 13:49:26 crc kubenswrapper[4861]: I1003 13:49:26.733089 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5ccc8479f9-dzkq2"] Oct 03 13:49:26 crc kubenswrapper[4861]: I1003 13:49:26.734713 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5ccc8479f9-dzkq2" podUID="bc8b9936-3237-4bdf-99b5-94c8154217dc" containerName="dnsmasq-dns" containerID="cri-o://6168d22a1bc581a267c56d4aa3ba9ad6df8f56547069fcefc064e9422ee9e829" gracePeriod=10 Oct 03 13:49:27 crc kubenswrapper[4861]: I1003 13:49:27.491703 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5ccc8479f9-dzkq2" Oct 03 13:49:27 crc kubenswrapper[4861]: E1003 13:49:27.553950 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovsdbserver-sb\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/ovsdbserver-sb-0" podUID="a361bfa1-97ef-4ee2-bcfe-3763898cbc32" Oct 03 13:49:27 crc kubenswrapper[4861]: I1003 13:49:27.609206 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bc8b9936-3237-4bdf-99b5-94c8154217dc-dns-svc\") pod \"bc8b9936-3237-4bdf-99b5-94c8154217dc\" (UID: \"bc8b9936-3237-4bdf-99b5-94c8154217dc\") " Oct 03 13:49:27 crc kubenswrapper[4861]: I1003 13:49:27.609328 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bc8b9936-3237-4bdf-99b5-94c8154217dc-config\") pod \"bc8b9936-3237-4bdf-99b5-94c8154217dc\" (UID: \"bc8b9936-3237-4bdf-99b5-94c8154217dc\") " Oct 03 13:49:27 crc kubenswrapper[4861]: I1003 13:49:27.609406 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2npqg\" (UniqueName: \"kubernetes.io/projected/bc8b9936-3237-4bdf-99b5-94c8154217dc-kube-api-access-2npqg\") pod \"bc8b9936-3237-4bdf-99b5-94c8154217dc\" (UID: \"bc8b9936-3237-4bdf-99b5-94c8154217dc\") " Oct 03 13:49:27 crc kubenswrapper[4861]: I1003 13:49:27.619423 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc8b9936-3237-4bdf-99b5-94c8154217dc-kube-api-access-2npqg" (OuterVolumeSpecName: "kube-api-access-2npqg") pod "bc8b9936-3237-4bdf-99b5-94c8154217dc" (UID: "bc8b9936-3237-4bdf-99b5-94c8154217dc"). InnerVolumeSpecName "kube-api-access-2npqg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:49:27 crc kubenswrapper[4861]: I1003 13:49:27.649882 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bc8b9936-3237-4bdf-99b5-94c8154217dc-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "bc8b9936-3237-4bdf-99b5-94c8154217dc" (UID: "bc8b9936-3237-4bdf-99b5-94c8154217dc"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:49:27 crc kubenswrapper[4861]: I1003 13:49:27.660338 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"cc0949fe-630b-4f65-9c6c-7a87272586a2","Type":"ContainerStarted","Data":"e0736460410c6316b3c31298fc4c8bb65410046a732a460c1ab6c3d0d7fd0e83"} Oct 03 13:49:27 crc kubenswrapper[4861]: I1003 13:49:27.662862 4861 generic.go:334] "Generic (PLEG): container finished" podID="bc8b9936-3237-4bdf-99b5-94c8154217dc" containerID="6168d22a1bc581a267c56d4aa3ba9ad6df8f56547069fcefc064e9422ee9e829" exitCode=0 Oct 03 13:49:27 crc kubenswrapper[4861]: I1003 13:49:27.662930 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5ccc8479f9-dzkq2" event={"ID":"bc8b9936-3237-4bdf-99b5-94c8154217dc","Type":"ContainerDied","Data":"6168d22a1bc581a267c56d4aa3ba9ad6df8f56547069fcefc064e9422ee9e829"} Oct 03 13:49:27 crc kubenswrapper[4861]: I1003 13:49:27.662959 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5ccc8479f9-dzkq2" event={"ID":"bc8b9936-3237-4bdf-99b5-94c8154217dc","Type":"ContainerDied","Data":"29b2e3afe7f0330942ec8584c9fe0fc620d7b1daf252a262d3e0452d15b5b073"} Oct 03 13:49:27 crc kubenswrapper[4861]: I1003 13:49:27.662972 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5ccc8479f9-dzkq2" Oct 03 13:49:27 crc kubenswrapper[4861]: I1003 13:49:27.662979 4861 scope.go:117] "RemoveContainer" containerID="6168d22a1bc581a267c56d4aa3ba9ad6df8f56547069fcefc064e9422ee9e829" Oct 03 13:49:27 crc kubenswrapper[4861]: I1003 13:49:27.667667 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"e729e54c-b7bc-46e8-94c8-ef5d8a4f42b1","Type":"ContainerStarted","Data":"7f160b4fc01591491f51a5747750deb559b598352ed2df8e185bf7c41ffef0e5"} Oct 03 13:49:27 crc kubenswrapper[4861]: I1003 13:49:27.676217 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bc8b9936-3237-4bdf-99b5-94c8154217dc-config" (OuterVolumeSpecName: "config") pod "bc8b9936-3237-4bdf-99b5-94c8154217dc" (UID: "bc8b9936-3237-4bdf-99b5-94c8154217dc"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:49:27 crc kubenswrapper[4861]: I1003 13:49:27.679260 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"a361bfa1-97ef-4ee2-bcfe-3763898cbc32","Type":"ContainerStarted","Data":"e3e63a3def1a9b2ea9c3608c612cb2c66f16a147b81486d57f533ac4a303042d"} Oct 03 13:49:27 crc kubenswrapper[4861]: I1003 13:49:27.686467 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"6714f489-ceb2-4b99-a61d-fe45289bed5f","Type":"ContainerStarted","Data":"82c21967f8666b34b11a1952ed3fe84c02694effdba3f1ba1ec58ce5c9522959"} Oct 03 13:49:27 crc kubenswrapper[4861]: I1003 13:49:27.701180 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=41.116038818 podStartE2EDuration="54.701142593s" podCreationTimestamp="2025-10-03 13:48:33 +0000 UTC" firstStartedPulling="2025-10-03 13:49:02.104779326 +0000 UTC m=+1056.102764373" lastFinishedPulling="2025-10-03 13:49:15.689883101 +0000 UTC m=+1069.687868148" observedRunningTime="2025-10-03 13:49:27.696292836 +0000 UTC m=+1081.694277903" watchObservedRunningTime="2025-10-03 13:49:27.701142593 +0000 UTC m=+1081.699127640" Oct 03 13:49:27 crc kubenswrapper[4861]: I1003 13:49:27.702914 4861 scope.go:117] "RemoveContainer" containerID="b894bf9efa10285ea00ee8c88b7a28dca014321507ff67f6e42ecf74c1ef1b81" Oct 03 13:49:27 crc kubenswrapper[4861]: I1003 13:49:27.711242 4861 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bc8b9936-3237-4bdf-99b5-94c8154217dc-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 03 13:49:27 crc kubenswrapper[4861]: I1003 13:49:27.711276 4861 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bc8b9936-3237-4bdf-99b5-94c8154217dc-config\") on node \"crc\" DevicePath \"\"" Oct 03 13:49:27 crc kubenswrapper[4861]: I1003 13:49:27.711289 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2npqg\" (UniqueName: \"kubernetes.io/projected/bc8b9936-3237-4bdf-99b5-94c8154217dc-kube-api-access-2npqg\") on node \"crc\" DevicePath \"\"" Oct 03 13:49:27 crc kubenswrapper[4861]: I1003 13:49:27.739091 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=23.591587389 podStartE2EDuration="48.73906529s" podCreationTimestamp="2025-10-03 13:48:39 +0000 UTC" firstStartedPulling="2025-10-03 13:49:02.111957315 +0000 UTC m=+1056.109942362" lastFinishedPulling="2025-10-03 13:49:27.259435216 +0000 UTC m=+1081.257420263" observedRunningTime="2025-10-03 13:49:27.732985311 +0000 UTC m=+1081.730970358" watchObservedRunningTime="2025-10-03 13:49:27.73906529 +0000 UTC m=+1081.737050337" Oct 03 13:49:27 crc kubenswrapper[4861]: I1003 13:49:27.755607 4861 scope.go:117] "RemoveContainer" containerID="6168d22a1bc581a267c56d4aa3ba9ad6df8f56547069fcefc064e9422ee9e829" Oct 03 13:49:27 crc kubenswrapper[4861]: E1003 13:49:27.756186 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6168d22a1bc581a267c56d4aa3ba9ad6df8f56547069fcefc064e9422ee9e829\": container with ID starting with 6168d22a1bc581a267c56d4aa3ba9ad6df8f56547069fcefc064e9422ee9e829 not found: ID does not exist" containerID="6168d22a1bc581a267c56d4aa3ba9ad6df8f56547069fcefc064e9422ee9e829" Oct 03 13:49:27 crc kubenswrapper[4861]: 
I1003 13:49:27.756604 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6168d22a1bc581a267c56d4aa3ba9ad6df8f56547069fcefc064e9422ee9e829"} err="failed to get container status \"6168d22a1bc581a267c56d4aa3ba9ad6df8f56547069fcefc064e9422ee9e829\": rpc error: code = NotFound desc = could not find container \"6168d22a1bc581a267c56d4aa3ba9ad6df8f56547069fcefc064e9422ee9e829\": container with ID starting with 6168d22a1bc581a267c56d4aa3ba9ad6df8f56547069fcefc064e9422ee9e829 not found: ID does not exist" Oct 03 13:49:27 crc kubenswrapper[4861]: I1003 13:49:27.756735 4861 scope.go:117] "RemoveContainer" containerID="b894bf9efa10285ea00ee8c88b7a28dca014321507ff67f6e42ecf74c1ef1b81" Oct 03 13:49:27 crc kubenswrapper[4861]: E1003 13:49:27.758688 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b894bf9efa10285ea00ee8c88b7a28dca014321507ff67f6e42ecf74c1ef1b81\": container with ID starting with b894bf9efa10285ea00ee8c88b7a28dca014321507ff67f6e42ecf74c1ef1b81 not found: ID does not exist" containerID="b894bf9efa10285ea00ee8c88b7a28dca014321507ff67f6e42ecf74c1ef1b81" Oct 03 13:49:27 crc kubenswrapper[4861]: I1003 13:49:27.758809 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b894bf9efa10285ea00ee8c88b7a28dca014321507ff67f6e42ecf74c1ef1b81"} err="failed to get container status \"b894bf9efa10285ea00ee8c88b7a28dca014321507ff67f6e42ecf74c1ef1b81\": rpc error: code = NotFound desc = could not find container \"b894bf9efa10285ea00ee8c88b7a28dca014321507ff67f6e42ecf74c1ef1b81\": container with ID starting with b894bf9efa10285ea00ee8c88b7a28dca014321507ff67f6e42ecf74c1ef1b81 not found: ID does not exist" Oct 03 13:49:27 crc kubenswrapper[4861]: I1003 13:49:27.774469 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=38.719097252 podStartE2EDuration="54.774448071s" podCreationTimestamp="2025-10-03 13:48:33 +0000 UTC" firstStartedPulling="2025-10-03 13:49:02.121642389 +0000 UTC m=+1056.119627436" lastFinishedPulling="2025-10-03 13:49:18.176993208 +0000 UTC m=+1072.174978255" observedRunningTime="2025-10-03 13:49:27.768070444 +0000 UTC m=+1081.766055511" watchObservedRunningTime="2025-10-03 13:49:27.774448071 +0000 UTC m=+1081.772433118" Oct 03 13:49:27 crc kubenswrapper[4861]: I1003 13:49:27.953602 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7cb5889db5-k9jbl"] Oct 03 13:49:27 crc kubenswrapper[4861]: E1003 13:49:27.954285 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bc8b9936-3237-4bdf-99b5-94c8154217dc" containerName="dnsmasq-dns" Oct 03 13:49:27 crc kubenswrapper[4861]: I1003 13:49:27.954304 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="bc8b9936-3237-4bdf-99b5-94c8154217dc" containerName="dnsmasq-dns" Oct 03 13:49:27 crc kubenswrapper[4861]: E1003 13:49:27.954321 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bc8b9936-3237-4bdf-99b5-94c8154217dc" containerName="init" Oct 03 13:49:27 crc kubenswrapper[4861]: I1003 13:49:27.954326 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="bc8b9936-3237-4bdf-99b5-94c8154217dc" containerName="init" Oct 03 13:49:27 crc kubenswrapper[4861]: I1003 13:49:27.954476 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="bc8b9936-3237-4bdf-99b5-94c8154217dc" containerName="dnsmasq-dns" Oct 03 13:49:27 crc 
kubenswrapper[4861]: I1003 13:49:27.960867 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7cb5889db5-k9jbl" Oct 03 13:49:27 crc kubenswrapper[4861]: I1003 13:49:27.975374 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7cb5889db5-k9jbl"] Oct 03 13:49:28 crc kubenswrapper[4861]: I1003 13:49:28.024901 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5ccc8479f9-dzkq2"] Oct 03 13:49:28 crc kubenswrapper[4861]: I1003 13:49:28.030652 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5ccc8479f9-dzkq2"] Oct 03 13:49:28 crc kubenswrapper[4861]: I1003 13:49:28.129215 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f3338337-471a-4440-888e-32f253d64969-dns-svc\") pod \"dnsmasq-dns-7cb5889db5-k9jbl\" (UID: \"f3338337-471a-4440-888e-32f253d64969\") " pod="openstack/dnsmasq-dns-7cb5889db5-k9jbl" Oct 03 13:49:28 crc kubenswrapper[4861]: I1003 13:49:28.129558 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f3338337-471a-4440-888e-32f253d64969-config\") pod \"dnsmasq-dns-7cb5889db5-k9jbl\" (UID: \"f3338337-471a-4440-888e-32f253d64969\") " pod="openstack/dnsmasq-dns-7cb5889db5-k9jbl" Oct 03 13:49:28 crc kubenswrapper[4861]: I1003 13:49:28.129736 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hv55t\" (UniqueName: \"kubernetes.io/projected/f3338337-471a-4440-888e-32f253d64969-kube-api-access-hv55t\") pod \"dnsmasq-dns-7cb5889db5-k9jbl\" (UID: \"f3338337-471a-4440-888e-32f253d64969\") " pod="openstack/dnsmasq-dns-7cb5889db5-k9jbl" Oct 03 13:49:28 crc kubenswrapper[4861]: I1003 13:49:28.231604 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f3338337-471a-4440-888e-32f253d64969-config\") pod \"dnsmasq-dns-7cb5889db5-k9jbl\" (UID: \"f3338337-471a-4440-888e-32f253d64969\") " pod="openstack/dnsmasq-dns-7cb5889db5-k9jbl" Oct 03 13:49:28 crc kubenswrapper[4861]: I1003 13:49:28.231709 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hv55t\" (UniqueName: \"kubernetes.io/projected/f3338337-471a-4440-888e-32f253d64969-kube-api-access-hv55t\") pod \"dnsmasq-dns-7cb5889db5-k9jbl\" (UID: \"f3338337-471a-4440-888e-32f253d64969\") " pod="openstack/dnsmasq-dns-7cb5889db5-k9jbl" Oct 03 13:49:28 crc kubenswrapper[4861]: I1003 13:49:28.231736 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f3338337-471a-4440-888e-32f253d64969-dns-svc\") pod \"dnsmasq-dns-7cb5889db5-k9jbl\" (UID: \"f3338337-471a-4440-888e-32f253d64969\") " pod="openstack/dnsmasq-dns-7cb5889db5-k9jbl" Oct 03 13:49:28 crc kubenswrapper[4861]: I1003 13:49:28.233139 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f3338337-471a-4440-888e-32f253d64969-config\") pod \"dnsmasq-dns-7cb5889db5-k9jbl\" (UID: \"f3338337-471a-4440-888e-32f253d64969\") " pod="openstack/dnsmasq-dns-7cb5889db5-k9jbl" Oct 03 13:49:28 crc kubenswrapper[4861]: I1003 13:49:28.233168 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: 
\"kubernetes.io/configmap/f3338337-471a-4440-888e-32f253d64969-dns-svc\") pod \"dnsmasq-dns-7cb5889db5-k9jbl\" (UID: \"f3338337-471a-4440-888e-32f253d64969\") " pod="openstack/dnsmasq-dns-7cb5889db5-k9jbl" Oct 03 13:49:28 crc kubenswrapper[4861]: I1003 13:49:28.255400 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hv55t\" (UniqueName: \"kubernetes.io/projected/f3338337-471a-4440-888e-32f253d64969-kube-api-access-hv55t\") pod \"dnsmasq-dns-7cb5889db5-k9jbl\" (UID: \"f3338337-471a-4440-888e-32f253d64969\") " pod="openstack/dnsmasq-dns-7cb5889db5-k9jbl" Oct 03 13:49:28 crc kubenswrapper[4861]: I1003 13:49:28.286062 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7cb5889db5-k9jbl" Oct 03 13:49:28 crc kubenswrapper[4861]: I1003 13:49:28.709217 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc8b9936-3237-4bdf-99b5-94c8154217dc" path="/var/lib/kubelet/pods/bc8b9936-3237-4bdf-99b5-94c8154217dc/volumes" Oct 03 13:49:28 crc kubenswrapper[4861]: I1003 13:49:28.735383 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"a361bfa1-97ef-4ee2-bcfe-3763898cbc32","Type":"ContainerStarted","Data":"4e49fc615a3a9df3c182236fe5c6832f3abbbc08fd97531668fee13432e06870"} Oct 03 13:49:28 crc kubenswrapper[4861]: I1003 13:49:28.794502 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=20.094881978 podStartE2EDuration="45.7944773s" podCreationTimestamp="2025-10-03 13:48:43 +0000 UTC" firstStartedPulling="2025-10-03 13:49:02.544900552 +0000 UTC m=+1056.542885599" lastFinishedPulling="2025-10-03 13:49:28.244495874 +0000 UTC m=+1082.242480921" observedRunningTime="2025-10-03 13:49:28.774522526 +0000 UTC m=+1082.772507593" watchObservedRunningTime="2025-10-03 13:49:28.7944773 +0000 UTC m=+1082.792462357" Oct 03 13:49:28 crc kubenswrapper[4861]: I1003 13:49:28.797034 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7cb5889db5-k9jbl"] Oct 03 13:49:28 crc kubenswrapper[4861]: I1003 13:49:28.947839 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0" Oct 03 13:49:29 crc kubenswrapper[4861]: I1003 13:49:29.008293 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0" Oct 03 13:49:29 crc kubenswrapper[4861]: I1003 13:49:29.067082 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-storage-0"] Oct 03 13:49:29 crc kubenswrapper[4861]: I1003 13:49:29.148163 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Oct 03 13:49:29 crc kubenswrapper[4861]: I1003 13:49:29.148491 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-storage-0" Oct 03 13:49:29 crc kubenswrapper[4861]: I1003 13:49:29.155741 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-storage-config-data" Oct 03 13:49:29 crc kubenswrapper[4861]: I1003 13:49:29.156153 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-swift-dockercfg-5m5bf" Oct 03 13:49:29 crc kubenswrapper[4861]: I1003 13:49:29.156176 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-conf" Oct 03 13:49:29 crc kubenswrapper[4861]: I1003 13:49:29.156267 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-files" Oct 03 13:49:29 crc kubenswrapper[4861]: I1003 13:49:29.249956 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vzw7v\" (UniqueName: \"kubernetes.io/projected/532954b7-a9d5-4ddb-87af-b17408a5db8b-kube-api-access-vzw7v\") pod \"swift-storage-0\" (UID: \"532954b7-a9d5-4ddb-87af-b17408a5db8b\") " pod="openstack/swift-storage-0" Oct 03 13:49:29 crc kubenswrapper[4861]: I1003 13:49:29.250208 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/532954b7-a9d5-4ddb-87af-b17408a5db8b-lock\") pod \"swift-storage-0\" (UID: \"532954b7-a9d5-4ddb-87af-b17408a5db8b\") " pod="openstack/swift-storage-0" Oct 03 13:49:29 crc kubenswrapper[4861]: I1003 13:49:29.250264 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"swift-storage-0\" (UID: \"532954b7-a9d5-4ddb-87af-b17408a5db8b\") " pod="openstack/swift-storage-0" Oct 03 13:49:29 crc kubenswrapper[4861]: I1003 13:49:29.250329 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/532954b7-a9d5-4ddb-87af-b17408a5db8b-cache\") pod \"swift-storage-0\" (UID: \"532954b7-a9d5-4ddb-87af-b17408a5db8b\") " pod="openstack/swift-storage-0" Oct 03 13:49:29 crc kubenswrapper[4861]: I1003 13:49:29.250351 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/532954b7-a9d5-4ddb-87af-b17408a5db8b-etc-swift\") pod \"swift-storage-0\" (UID: \"532954b7-a9d5-4ddb-87af-b17408a5db8b\") " pod="openstack/swift-storage-0" Oct 03 13:49:29 crc kubenswrapper[4861]: I1003 13:49:29.351882 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/532954b7-a9d5-4ddb-87af-b17408a5db8b-cache\") pod \"swift-storage-0\" (UID: \"532954b7-a9d5-4ddb-87af-b17408a5db8b\") " pod="openstack/swift-storage-0" Oct 03 13:49:29 crc kubenswrapper[4861]: I1003 13:49:29.352176 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/532954b7-a9d5-4ddb-87af-b17408a5db8b-etc-swift\") pod \"swift-storage-0\" (UID: \"532954b7-a9d5-4ddb-87af-b17408a5db8b\") " pod="openstack/swift-storage-0" Oct 03 13:49:29 crc kubenswrapper[4861]: E1003 13:49:29.352592 4861 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Oct 03 13:49:29 crc kubenswrapper[4861]: E1003 13:49:29.352698 4861 projected.go:194] Error preparing data 
for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Oct 03 13:49:29 crc kubenswrapper[4861]: E1003 13:49:29.352822 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/532954b7-a9d5-4ddb-87af-b17408a5db8b-etc-swift podName:532954b7-a9d5-4ddb-87af-b17408a5db8b nodeName:}" failed. No retries permitted until 2025-10-03 13:49:29.852801075 +0000 UTC m=+1083.850786122 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/532954b7-a9d5-4ddb-87af-b17408a5db8b-etc-swift") pod "swift-storage-0" (UID: "532954b7-a9d5-4ddb-87af-b17408a5db8b") : configmap "swift-ring-files" not found Oct 03 13:49:29 crc kubenswrapper[4861]: I1003 13:49:29.352928 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vzw7v\" (UniqueName: \"kubernetes.io/projected/532954b7-a9d5-4ddb-87af-b17408a5db8b-kube-api-access-vzw7v\") pod \"swift-storage-0\" (UID: \"532954b7-a9d5-4ddb-87af-b17408a5db8b\") " pod="openstack/swift-storage-0" Oct 03 13:49:29 crc kubenswrapper[4861]: I1003 13:49:29.353037 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/532954b7-a9d5-4ddb-87af-b17408a5db8b-lock\") pod \"swift-storage-0\" (UID: \"532954b7-a9d5-4ddb-87af-b17408a5db8b\") " pod="openstack/swift-storage-0" Oct 03 13:49:29 crc kubenswrapper[4861]: I1003 13:49:29.352740 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/532954b7-a9d5-4ddb-87af-b17408a5db8b-cache\") pod \"swift-storage-0\" (UID: \"532954b7-a9d5-4ddb-87af-b17408a5db8b\") " pod="openstack/swift-storage-0" Oct 03 13:49:29 crc kubenswrapper[4861]: I1003 13:49:29.353222 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"swift-storage-0\" (UID: \"532954b7-a9d5-4ddb-87af-b17408a5db8b\") " pod="openstack/swift-storage-0" Oct 03 13:49:29 crc kubenswrapper[4861]: I1003 13:49:29.353582 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/532954b7-a9d5-4ddb-87af-b17408a5db8b-lock\") pod \"swift-storage-0\" (UID: \"532954b7-a9d5-4ddb-87af-b17408a5db8b\") " pod="openstack/swift-storage-0" Oct 03 13:49:29 crc kubenswrapper[4861]: I1003 13:49:29.353810 4861 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"swift-storage-0\" (UID: \"532954b7-a9d5-4ddb-87af-b17408a5db8b\") device mount path \"/mnt/openstack/pv06\"" pod="openstack/swift-storage-0" Oct 03 13:49:29 crc kubenswrapper[4861]: I1003 13:49:29.381485 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vzw7v\" (UniqueName: \"kubernetes.io/projected/532954b7-a9d5-4ddb-87af-b17408a5db8b-kube-api-access-vzw7v\") pod \"swift-storage-0\" (UID: \"532954b7-a9d5-4ddb-87af-b17408a5db8b\") " pod="openstack/swift-storage-0" Oct 03 13:49:29 crc kubenswrapper[4861]: I1003 13:49:29.381877 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"swift-storage-0\" (UID: \"532954b7-a9d5-4ddb-87af-b17408a5db8b\") " pod="openstack/swift-storage-0" Oct 03 
13:49:29 crc kubenswrapper[4861]: I1003 13:49:29.597842 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-s5lt7"] Oct 03 13:49:29 crc kubenswrapper[4861]: I1003 13:49:29.599139 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-s5lt7" Oct 03 13:49:29 crc kubenswrapper[4861]: I1003 13:49:29.601332 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-config-data" Oct 03 13:49:29 crc kubenswrapper[4861]: I1003 13:49:29.601356 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Oct 03 13:49:29 crc kubenswrapper[4861]: I1003 13:49:29.603290 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-scripts" Oct 03 13:49:29 crc kubenswrapper[4861]: I1003 13:49:29.627812 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-s5lt7"] Oct 03 13:49:29 crc kubenswrapper[4861]: I1003 13:49:29.657312 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/971f0946-1828-4512-9b7d-2bafc5a78ef3-dispersionconf\") pod \"swift-ring-rebalance-s5lt7\" (UID: \"971f0946-1828-4512-9b7d-2bafc5a78ef3\") " pod="openstack/swift-ring-rebalance-s5lt7" Oct 03 13:49:29 crc kubenswrapper[4861]: I1003 13:49:29.657377 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g79sl\" (UniqueName: \"kubernetes.io/projected/971f0946-1828-4512-9b7d-2bafc5a78ef3-kube-api-access-g79sl\") pod \"swift-ring-rebalance-s5lt7\" (UID: \"971f0946-1828-4512-9b7d-2bafc5a78ef3\") " pod="openstack/swift-ring-rebalance-s5lt7" Oct 03 13:49:29 crc kubenswrapper[4861]: I1003 13:49:29.657424 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/971f0946-1828-4512-9b7d-2bafc5a78ef3-swiftconf\") pod \"swift-ring-rebalance-s5lt7\" (UID: \"971f0946-1828-4512-9b7d-2bafc5a78ef3\") " pod="openstack/swift-ring-rebalance-s5lt7" Oct 03 13:49:29 crc kubenswrapper[4861]: I1003 13:49:29.657450 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/971f0946-1828-4512-9b7d-2bafc5a78ef3-ring-data-devices\") pod \"swift-ring-rebalance-s5lt7\" (UID: \"971f0946-1828-4512-9b7d-2bafc5a78ef3\") " pod="openstack/swift-ring-rebalance-s5lt7" Oct 03 13:49:29 crc kubenswrapper[4861]: I1003 13:49:29.657467 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/971f0946-1828-4512-9b7d-2bafc5a78ef3-combined-ca-bundle\") pod \"swift-ring-rebalance-s5lt7\" (UID: \"971f0946-1828-4512-9b7d-2bafc5a78ef3\") " pod="openstack/swift-ring-rebalance-s5lt7" Oct 03 13:49:29 crc kubenswrapper[4861]: I1003 13:49:29.657601 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/971f0946-1828-4512-9b7d-2bafc5a78ef3-etc-swift\") pod \"swift-ring-rebalance-s5lt7\" (UID: \"971f0946-1828-4512-9b7d-2bafc5a78ef3\") " pod="openstack/swift-ring-rebalance-s5lt7" Oct 03 13:49:29 crc kubenswrapper[4861]: I1003 13:49:29.657643 4861 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/971f0946-1828-4512-9b7d-2bafc5a78ef3-scripts\") pod \"swift-ring-rebalance-s5lt7\" (UID: \"971f0946-1828-4512-9b7d-2bafc5a78ef3\") " pod="openstack/swift-ring-rebalance-s5lt7" Oct 03 13:49:29 crc kubenswrapper[4861]: I1003 13:49:29.744137 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-x5wkc" event={"ID":"ef2b767c-ef6a-4364-a3f0-14b68bee3986","Type":"ContainerStarted","Data":"257adac014b6a4f644165a5c00d7046874b61a38375b643435ff6263ce676d6a"} Oct 03 13:49:29 crc kubenswrapper[4861]: I1003 13:49:29.744363 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-x5wkc" Oct 03 13:49:29 crc kubenswrapper[4861]: I1003 13:49:29.745356 4861 generic.go:334] "Generic (PLEG): container finished" podID="f3338337-471a-4440-888e-32f253d64969" containerID="1eb4ee01c8b73b2ffe4316a784592edb43634988910f276a451736f102962f96" exitCode=0 Oct 03 13:49:29 crc kubenswrapper[4861]: I1003 13:49:29.745396 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7cb5889db5-k9jbl" event={"ID":"f3338337-471a-4440-888e-32f253d64969","Type":"ContainerDied","Data":"1eb4ee01c8b73b2ffe4316a784592edb43634988910f276a451736f102962f96"} Oct 03 13:49:29 crc kubenswrapper[4861]: I1003 13:49:29.745718 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7cb5889db5-k9jbl" event={"ID":"f3338337-471a-4440-888e-32f253d64969","Type":"ContainerStarted","Data":"3878797ed26cd1af86f627455bb931050bf740b1db655e34617fa86eac2799a3"} Oct 03 13:49:29 crc kubenswrapper[4861]: I1003 13:49:29.746137 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0" Oct 03 13:49:29 crc kubenswrapper[4861]: I1003 13:49:29.759293 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/971f0946-1828-4512-9b7d-2bafc5a78ef3-etc-swift\") pod \"swift-ring-rebalance-s5lt7\" (UID: \"971f0946-1828-4512-9b7d-2bafc5a78ef3\") " pod="openstack/swift-ring-rebalance-s5lt7" Oct 03 13:49:29 crc kubenswrapper[4861]: I1003 13:49:29.759368 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/971f0946-1828-4512-9b7d-2bafc5a78ef3-scripts\") pod \"swift-ring-rebalance-s5lt7\" (UID: \"971f0946-1828-4512-9b7d-2bafc5a78ef3\") " pod="openstack/swift-ring-rebalance-s5lt7" Oct 03 13:49:29 crc kubenswrapper[4861]: I1003 13:49:29.759452 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/971f0946-1828-4512-9b7d-2bafc5a78ef3-dispersionconf\") pod \"swift-ring-rebalance-s5lt7\" (UID: \"971f0946-1828-4512-9b7d-2bafc5a78ef3\") " pod="openstack/swift-ring-rebalance-s5lt7" Oct 03 13:49:29 crc kubenswrapper[4861]: I1003 13:49:29.759504 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g79sl\" (UniqueName: \"kubernetes.io/projected/971f0946-1828-4512-9b7d-2bafc5a78ef3-kube-api-access-g79sl\") pod \"swift-ring-rebalance-s5lt7\" (UID: \"971f0946-1828-4512-9b7d-2bafc5a78ef3\") " pod="openstack/swift-ring-rebalance-s5lt7" Oct 03 13:49:29 crc kubenswrapper[4861]: I1003 13:49:29.759578 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: 
\"kubernetes.io/secret/971f0946-1828-4512-9b7d-2bafc5a78ef3-swiftconf\") pod \"swift-ring-rebalance-s5lt7\" (UID: \"971f0946-1828-4512-9b7d-2bafc5a78ef3\") " pod="openstack/swift-ring-rebalance-s5lt7" Oct 03 13:49:29 crc kubenswrapper[4861]: I1003 13:49:29.759616 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/971f0946-1828-4512-9b7d-2bafc5a78ef3-ring-data-devices\") pod \"swift-ring-rebalance-s5lt7\" (UID: \"971f0946-1828-4512-9b7d-2bafc5a78ef3\") " pod="openstack/swift-ring-rebalance-s5lt7" Oct 03 13:49:29 crc kubenswrapper[4861]: I1003 13:49:29.759637 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/971f0946-1828-4512-9b7d-2bafc5a78ef3-combined-ca-bundle\") pod \"swift-ring-rebalance-s5lt7\" (UID: \"971f0946-1828-4512-9b7d-2bafc5a78ef3\") " pod="openstack/swift-ring-rebalance-s5lt7" Oct 03 13:49:29 crc kubenswrapper[4861]: I1003 13:49:29.760506 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/971f0946-1828-4512-9b7d-2bafc5a78ef3-etc-swift\") pod \"swift-ring-rebalance-s5lt7\" (UID: \"971f0946-1828-4512-9b7d-2bafc5a78ef3\") " pod="openstack/swift-ring-rebalance-s5lt7" Oct 03 13:49:29 crc kubenswrapper[4861]: I1003 13:49:29.760772 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/971f0946-1828-4512-9b7d-2bafc5a78ef3-scripts\") pod \"swift-ring-rebalance-s5lt7\" (UID: \"971f0946-1828-4512-9b7d-2bafc5a78ef3\") " pod="openstack/swift-ring-rebalance-s5lt7" Oct 03 13:49:29 crc kubenswrapper[4861]: I1003 13:49:29.761831 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/971f0946-1828-4512-9b7d-2bafc5a78ef3-ring-data-devices\") pod \"swift-ring-rebalance-s5lt7\" (UID: \"971f0946-1828-4512-9b7d-2bafc5a78ef3\") " pod="openstack/swift-ring-rebalance-s5lt7" Oct 03 13:49:29 crc kubenswrapper[4861]: I1003 13:49:29.769503 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/971f0946-1828-4512-9b7d-2bafc5a78ef3-dispersionconf\") pod \"swift-ring-rebalance-s5lt7\" (UID: \"971f0946-1828-4512-9b7d-2bafc5a78ef3\") " pod="openstack/swift-ring-rebalance-s5lt7" Oct 03 13:49:29 crc kubenswrapper[4861]: I1003 13:49:29.770533 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/971f0946-1828-4512-9b7d-2bafc5a78ef3-combined-ca-bundle\") pod \"swift-ring-rebalance-s5lt7\" (UID: \"971f0946-1828-4512-9b7d-2bafc5a78ef3\") " pod="openstack/swift-ring-rebalance-s5lt7" Oct 03 13:49:29 crc kubenswrapper[4861]: I1003 13:49:29.772542 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-x5wkc" podStartSLOduration=23.288708994 podStartE2EDuration="49.772525864s" podCreationTimestamp="2025-10-03 13:48:40 +0000 UTC" firstStartedPulling="2025-10-03 13:49:01.954541514 +0000 UTC m=+1055.952526561" lastFinishedPulling="2025-10-03 13:49:28.438358384 +0000 UTC m=+1082.436343431" observedRunningTime="2025-10-03 13:49:29.770303966 +0000 UTC m=+1083.768289013" watchObservedRunningTime="2025-10-03 13:49:29.772525864 +0000 UTC m=+1083.770510911" Oct 03 13:49:29 crc kubenswrapper[4861]: I1003 13:49:29.774890 4861 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/971f0946-1828-4512-9b7d-2bafc5a78ef3-swiftconf\") pod \"swift-ring-rebalance-s5lt7\" (UID: \"971f0946-1828-4512-9b7d-2bafc5a78ef3\") " pod="openstack/swift-ring-rebalance-s5lt7" Oct 03 13:49:29 crc kubenswrapper[4861]: I1003 13:49:29.808010 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g79sl\" (UniqueName: \"kubernetes.io/projected/971f0946-1828-4512-9b7d-2bafc5a78ef3-kube-api-access-g79sl\") pod \"swift-ring-rebalance-s5lt7\" (UID: \"971f0946-1828-4512-9b7d-2bafc5a78ef3\") " pod="openstack/swift-ring-rebalance-s5lt7" Oct 03 13:49:29 crc kubenswrapper[4861]: I1003 13:49:29.817503 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0" Oct 03 13:49:29 crc kubenswrapper[4861]: I1003 13:49:29.862124 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/532954b7-a9d5-4ddb-87af-b17408a5db8b-etc-swift\") pod \"swift-storage-0\" (UID: \"532954b7-a9d5-4ddb-87af-b17408a5db8b\") " pod="openstack/swift-storage-0" Oct 03 13:49:29 crc kubenswrapper[4861]: E1003 13:49:29.866098 4861 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Oct 03 13:49:29 crc kubenswrapper[4861]: E1003 13:49:29.866267 4861 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Oct 03 13:49:29 crc kubenswrapper[4861]: E1003 13:49:29.866374 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/532954b7-a9d5-4ddb-87af-b17408a5db8b-etc-swift podName:532954b7-a9d5-4ddb-87af-b17408a5db8b nodeName:}" failed. No retries permitted until 2025-10-03 13:49:30.866357792 +0000 UTC m=+1084.864342839 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/532954b7-a9d5-4ddb-87af-b17408a5db8b-etc-swift") pod "swift-storage-0" (UID: "532954b7-a9d5-4ddb-87af-b17408a5db8b") : configmap "swift-ring-files" not found Oct 03 13:49:29 crc kubenswrapper[4861]: I1003 13:49:29.919561 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-s5lt7" Oct 03 13:49:30 crc kubenswrapper[4861]: I1003 13:49:30.105390 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7cb5889db5-k9jbl"] Oct 03 13:49:30 crc kubenswrapper[4861]: I1003 13:49:30.144728 4861 patch_prober.go:28] interesting pod/machine-config-daemon-t9slw container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 13:49:30 crc kubenswrapper[4861]: I1003 13:49:30.144788 4861 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 13:49:30 crc kubenswrapper[4861]: I1003 13:49:30.169734 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-74f6f696b9-6zxrw"] Oct 03 13:49:30 crc kubenswrapper[4861]: I1003 13:49:30.174868 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-74f6f696b9-6zxrw" Oct 03 13:49:30 crc kubenswrapper[4861]: I1003 13:49:30.180865 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb" Oct 03 13:49:30 crc kubenswrapper[4861]: I1003 13:49:30.209725 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-74f6f696b9-6zxrw"] Oct 03 13:49:30 crc kubenswrapper[4861]: I1003 13:49:30.262404 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-metrics-xsmhv"] Oct 03 13:49:30 crc kubenswrapper[4861]: I1003 13:49:30.273710 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-metrics-xsmhv" Oct 03 13:49:30 crc kubenswrapper[4861]: I1003 13:49:30.280405 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config" Oct 03 13:49:30 crc kubenswrapper[4861]: I1003 13:49:30.291774 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-xsmhv"] Oct 03 13:49:30 crc kubenswrapper[4861]: I1003 13:49:30.298844 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0" Oct 03 13:49:30 crc kubenswrapper[4861]: I1003 13:49:30.298887 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0" Oct 03 13:49:30 crc kubenswrapper[4861]: I1003 13:49:30.371742 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1db49359-5414-446f-8049-45733109066a-dns-svc\") pod \"dnsmasq-dns-74f6f696b9-6zxrw\" (UID: \"1db49359-5414-446f-8049-45733109066a\") " pod="openstack/dnsmasq-dns-74f6f696b9-6zxrw" Oct 03 13:49:30 crc kubenswrapper[4861]: I1003 13:49:30.371790 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1db49359-5414-446f-8049-45733109066a-ovsdbserver-nb\") pod \"dnsmasq-dns-74f6f696b9-6zxrw\" (UID: \"1db49359-5414-446f-8049-45733109066a\") " pod="openstack/dnsmasq-dns-74f6f696b9-6zxrw" Oct 03 13:49:30 crc kubenswrapper[4861]: I1003 13:49:30.373068 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1db49359-5414-446f-8049-45733109066a-config\") pod \"dnsmasq-dns-74f6f696b9-6zxrw\" (UID: \"1db49359-5414-446f-8049-45733109066a\") " pod="openstack/dnsmasq-dns-74f6f696b9-6zxrw" Oct 03 13:49:30 crc kubenswrapper[4861]: I1003 13:49:30.373147 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j57n7\" (UniqueName: \"kubernetes.io/projected/1db49359-5414-446f-8049-45733109066a-kube-api-access-j57n7\") pod \"dnsmasq-dns-74f6f696b9-6zxrw\" (UID: \"1db49359-5414-446f-8049-45733109066a\") " pod="openstack/dnsmasq-dns-74f6f696b9-6zxrw" Oct 03 13:49:30 crc kubenswrapper[4861]: I1003 13:49:30.478345 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/0636b1a6-6f21-4d14-8a07-014a3e9395c7-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-xsmhv\" (UID: \"0636b1a6-6f21-4d14-8a07-014a3e9395c7\") " pod="openstack/ovn-controller-metrics-xsmhv" Oct 03 13:49:30 crc kubenswrapper[4861]: I1003 13:49:30.478433 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/0636b1a6-6f21-4d14-8a07-014a3e9395c7-ovn-rundir\") pod \"ovn-controller-metrics-xsmhv\" (UID: \"0636b1a6-6f21-4d14-8a07-014a3e9395c7\") " pod="openstack/ovn-controller-metrics-xsmhv" Oct 03 13:49:30 crc kubenswrapper[4861]: I1003 13:49:30.478596 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0636b1a6-6f21-4d14-8a07-014a3e9395c7-combined-ca-bundle\") pod \"ovn-controller-metrics-xsmhv\" (UID: \"0636b1a6-6f21-4d14-8a07-014a3e9395c7\") " 
pod="openstack/ovn-controller-metrics-xsmhv" Oct 03 13:49:30 crc kubenswrapper[4861]: I1003 13:49:30.478760 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/0636b1a6-6f21-4d14-8a07-014a3e9395c7-ovs-rundir\") pod \"ovn-controller-metrics-xsmhv\" (UID: \"0636b1a6-6f21-4d14-8a07-014a3e9395c7\") " pod="openstack/ovn-controller-metrics-xsmhv" Oct 03 13:49:30 crc kubenswrapper[4861]: I1003 13:49:30.478831 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1db49359-5414-446f-8049-45733109066a-config\") pod \"dnsmasq-dns-74f6f696b9-6zxrw\" (UID: \"1db49359-5414-446f-8049-45733109066a\") " pod="openstack/dnsmasq-dns-74f6f696b9-6zxrw" Oct 03 13:49:30 crc kubenswrapper[4861]: I1003 13:49:30.478940 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j57n7\" (UniqueName: \"kubernetes.io/projected/1db49359-5414-446f-8049-45733109066a-kube-api-access-j57n7\") pod \"dnsmasq-dns-74f6f696b9-6zxrw\" (UID: \"1db49359-5414-446f-8049-45733109066a\") " pod="openstack/dnsmasq-dns-74f6f696b9-6zxrw" Oct 03 13:49:30 crc kubenswrapper[4861]: I1003 13:49:30.478985 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0636b1a6-6f21-4d14-8a07-014a3e9395c7-config\") pod \"ovn-controller-metrics-xsmhv\" (UID: \"0636b1a6-6f21-4d14-8a07-014a3e9395c7\") " pod="openstack/ovn-controller-metrics-xsmhv" Oct 03 13:49:30 crc kubenswrapper[4861]: I1003 13:49:30.479163 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f6fpp\" (UniqueName: \"kubernetes.io/projected/0636b1a6-6f21-4d14-8a07-014a3e9395c7-kube-api-access-f6fpp\") pod \"ovn-controller-metrics-xsmhv\" (UID: \"0636b1a6-6f21-4d14-8a07-014a3e9395c7\") " pod="openstack/ovn-controller-metrics-xsmhv" Oct 03 13:49:30 crc kubenswrapper[4861]: I1003 13:49:30.479218 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1db49359-5414-446f-8049-45733109066a-dns-svc\") pod \"dnsmasq-dns-74f6f696b9-6zxrw\" (UID: \"1db49359-5414-446f-8049-45733109066a\") " pod="openstack/dnsmasq-dns-74f6f696b9-6zxrw" Oct 03 13:49:30 crc kubenswrapper[4861]: I1003 13:49:30.479287 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1db49359-5414-446f-8049-45733109066a-ovsdbserver-nb\") pod \"dnsmasq-dns-74f6f696b9-6zxrw\" (UID: \"1db49359-5414-446f-8049-45733109066a\") " pod="openstack/dnsmasq-dns-74f6f696b9-6zxrw" Oct 03 13:49:30 crc kubenswrapper[4861]: I1003 13:49:30.480629 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1db49359-5414-446f-8049-45733109066a-ovsdbserver-nb\") pod \"dnsmasq-dns-74f6f696b9-6zxrw\" (UID: \"1db49359-5414-446f-8049-45733109066a\") " pod="openstack/dnsmasq-dns-74f6f696b9-6zxrw" Oct 03 13:49:30 crc kubenswrapper[4861]: I1003 13:49:30.480961 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1db49359-5414-446f-8049-45733109066a-config\") pod \"dnsmasq-dns-74f6f696b9-6zxrw\" (UID: \"1db49359-5414-446f-8049-45733109066a\") " pod="openstack/dnsmasq-dns-74f6f696b9-6zxrw" Oct 03 
13:49:30 crc kubenswrapper[4861]: I1003 13:49:30.481566 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1db49359-5414-446f-8049-45733109066a-dns-svc\") pod \"dnsmasq-dns-74f6f696b9-6zxrw\" (UID: \"1db49359-5414-446f-8049-45733109066a\") " pod="openstack/dnsmasq-dns-74f6f696b9-6zxrw" Oct 03 13:49:30 crc kubenswrapper[4861]: I1003 13:49:30.523588 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j57n7\" (UniqueName: \"kubernetes.io/projected/1db49359-5414-446f-8049-45733109066a-kube-api-access-j57n7\") pod \"dnsmasq-dns-74f6f696b9-6zxrw\" (UID: \"1db49359-5414-446f-8049-45733109066a\") " pod="openstack/dnsmasq-dns-74f6f696b9-6zxrw" Oct 03 13:49:30 crc kubenswrapper[4861]: I1003 13:49:30.547302 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-74f6f696b9-6zxrw" Oct 03 13:49:30 crc kubenswrapper[4861]: I1003 13:49:30.580857 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0636b1a6-6f21-4d14-8a07-014a3e9395c7-config\") pod \"ovn-controller-metrics-xsmhv\" (UID: \"0636b1a6-6f21-4d14-8a07-014a3e9395c7\") " pod="openstack/ovn-controller-metrics-xsmhv" Oct 03 13:49:30 crc kubenswrapper[4861]: I1003 13:49:30.581403 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f6fpp\" (UniqueName: \"kubernetes.io/projected/0636b1a6-6f21-4d14-8a07-014a3e9395c7-kube-api-access-f6fpp\") pod \"ovn-controller-metrics-xsmhv\" (UID: \"0636b1a6-6f21-4d14-8a07-014a3e9395c7\") " pod="openstack/ovn-controller-metrics-xsmhv" Oct 03 13:49:30 crc kubenswrapper[4861]: I1003 13:49:30.581455 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/0636b1a6-6f21-4d14-8a07-014a3e9395c7-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-xsmhv\" (UID: \"0636b1a6-6f21-4d14-8a07-014a3e9395c7\") " pod="openstack/ovn-controller-metrics-xsmhv" Oct 03 13:49:30 crc kubenswrapper[4861]: I1003 13:49:30.581570 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/0636b1a6-6f21-4d14-8a07-014a3e9395c7-ovn-rundir\") pod \"ovn-controller-metrics-xsmhv\" (UID: \"0636b1a6-6f21-4d14-8a07-014a3e9395c7\") " pod="openstack/ovn-controller-metrics-xsmhv" Oct 03 13:49:30 crc kubenswrapper[4861]: I1003 13:49:30.581596 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0636b1a6-6f21-4d14-8a07-014a3e9395c7-combined-ca-bundle\") pod \"ovn-controller-metrics-xsmhv\" (UID: \"0636b1a6-6f21-4d14-8a07-014a3e9395c7\") " pod="openstack/ovn-controller-metrics-xsmhv" Oct 03 13:49:30 crc kubenswrapper[4861]: I1003 13:49:30.581738 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/0636b1a6-6f21-4d14-8a07-014a3e9395c7-ovs-rundir\") pod \"ovn-controller-metrics-xsmhv\" (UID: \"0636b1a6-6f21-4d14-8a07-014a3e9395c7\") " pod="openstack/ovn-controller-metrics-xsmhv" Oct 03 13:49:30 crc kubenswrapper[4861]: I1003 13:49:30.582202 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/0636b1a6-6f21-4d14-8a07-014a3e9395c7-ovs-rundir\") pod \"ovn-controller-metrics-xsmhv\" 
(UID: \"0636b1a6-6f21-4d14-8a07-014a3e9395c7\") " pod="openstack/ovn-controller-metrics-xsmhv" Oct 03 13:49:30 crc kubenswrapper[4861]: I1003 13:49:30.582683 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/0636b1a6-6f21-4d14-8a07-014a3e9395c7-ovn-rundir\") pod \"ovn-controller-metrics-xsmhv\" (UID: \"0636b1a6-6f21-4d14-8a07-014a3e9395c7\") " pod="openstack/ovn-controller-metrics-xsmhv" Oct 03 13:49:30 crc kubenswrapper[4861]: I1003 13:49:30.582964 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0636b1a6-6f21-4d14-8a07-014a3e9395c7-config\") pod \"ovn-controller-metrics-xsmhv\" (UID: \"0636b1a6-6f21-4d14-8a07-014a3e9395c7\") " pod="openstack/ovn-controller-metrics-xsmhv" Oct 03 13:49:30 crc kubenswrapper[4861]: I1003 13:49:30.586613 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/0636b1a6-6f21-4d14-8a07-014a3e9395c7-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-xsmhv\" (UID: \"0636b1a6-6f21-4d14-8a07-014a3e9395c7\") " pod="openstack/ovn-controller-metrics-xsmhv" Oct 03 13:49:30 crc kubenswrapper[4861]: I1003 13:49:30.587140 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0636b1a6-6f21-4d14-8a07-014a3e9395c7-combined-ca-bundle\") pod \"ovn-controller-metrics-xsmhv\" (UID: \"0636b1a6-6f21-4d14-8a07-014a3e9395c7\") " pod="openstack/ovn-controller-metrics-xsmhv" Oct 03 13:49:30 crc kubenswrapper[4861]: I1003 13:49:30.610804 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f6fpp\" (UniqueName: \"kubernetes.io/projected/0636b1a6-6f21-4d14-8a07-014a3e9395c7-kube-api-access-f6fpp\") pod \"ovn-controller-metrics-xsmhv\" (UID: \"0636b1a6-6f21-4d14-8a07-014a3e9395c7\") " pod="openstack/ovn-controller-metrics-xsmhv" Oct 03 13:49:30 crc kubenswrapper[4861]: I1003 13:49:30.665598 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-s5lt7"] Oct 03 13:49:30 crc kubenswrapper[4861]: W1003 13:49:30.675215 4861 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod971f0946_1828_4512_9b7d_2bafc5a78ef3.slice/crio-56905996314a13672aef12bdd08392229c6711a3d2dfb3c78e1bdcd5c7dde2e0 WatchSource:0}: Error finding container 56905996314a13672aef12bdd08392229c6711a3d2dfb3c78e1bdcd5c7dde2e0: Status 404 returned error can't find the container with id 56905996314a13672aef12bdd08392229c6711a3d2dfb3c78e1bdcd5c7dde2e0 Oct 03 13:49:30 crc kubenswrapper[4861]: I1003 13:49:30.724627 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-74f6f696b9-6zxrw"] Oct 03 13:49:30 crc kubenswrapper[4861]: I1003 13:49:30.754311 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-s5lt7" event={"ID":"971f0946-1828-4512-9b7d-2bafc5a78ef3","Type":"ContainerStarted","Data":"56905996314a13672aef12bdd08392229c6711a3d2dfb3c78e1bdcd5c7dde2e0"} Oct 03 13:49:30 crc kubenswrapper[4861]: I1003 13:49:30.758945 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7cb5889db5-k9jbl" event={"ID":"f3338337-471a-4440-888e-32f253d64969","Type":"ContainerStarted","Data":"88d02d608b6b6d17dd1c58218ad32bcd2e5ffbc85d2d5f0257d8f055da0d70fe"} Oct 03 13:49:30 crc kubenswrapper[4861]: I1003 
13:49:30.766784 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-698758b865-wv7zw"] Oct 03 13:49:30 crc kubenswrapper[4861]: I1003 13:49:30.767989 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-wv7zw" Oct 03 13:49:30 crc kubenswrapper[4861]: I1003 13:49:30.777428 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb" Oct 03 13:49:30 crc kubenswrapper[4861]: I1003 13:49:30.780123 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-698758b865-wv7zw"] Oct 03 13:49:30 crc kubenswrapper[4861]: I1003 13:49:30.824832 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7cb5889db5-k9jbl" podStartSLOduration=3.824810341 podStartE2EDuration="3.824810341s" podCreationTimestamp="2025-10-03 13:49:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:49:30.794618546 +0000 UTC m=+1084.792603593" watchObservedRunningTime="2025-10-03 13:49:30.824810341 +0000 UTC m=+1084.822795388" Oct 03 13:49:30 crc kubenswrapper[4861]: I1003 13:49:30.893622 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b558a2b4-cf02-41f6-a03d-b1050708ab99-ovsdbserver-nb\") pod \"dnsmasq-dns-698758b865-wv7zw\" (UID: \"b558a2b4-cf02-41f6-a03d-b1050708ab99\") " pod="openstack/dnsmasq-dns-698758b865-wv7zw" Oct 03 13:49:30 crc kubenswrapper[4861]: I1003 13:49:30.893697 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tfxh8\" (UniqueName: \"kubernetes.io/projected/b558a2b4-cf02-41f6-a03d-b1050708ab99-kube-api-access-tfxh8\") pod \"dnsmasq-dns-698758b865-wv7zw\" (UID: \"b558a2b4-cf02-41f6-a03d-b1050708ab99\") " pod="openstack/dnsmasq-dns-698758b865-wv7zw" Oct 03 13:49:30 crc kubenswrapper[4861]: I1003 13:49:30.893767 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b558a2b4-cf02-41f6-a03d-b1050708ab99-dns-svc\") pod \"dnsmasq-dns-698758b865-wv7zw\" (UID: \"b558a2b4-cf02-41f6-a03d-b1050708ab99\") " pod="openstack/dnsmasq-dns-698758b865-wv7zw" Oct 03 13:49:30 crc kubenswrapper[4861]: I1003 13:49:30.893843 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/532954b7-a9d5-4ddb-87af-b17408a5db8b-etc-swift\") pod \"swift-storage-0\" (UID: \"532954b7-a9d5-4ddb-87af-b17408a5db8b\") " pod="openstack/swift-storage-0" Oct 03 13:49:30 crc kubenswrapper[4861]: I1003 13:49:30.893868 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b558a2b4-cf02-41f6-a03d-b1050708ab99-config\") pod \"dnsmasq-dns-698758b865-wv7zw\" (UID: \"b558a2b4-cf02-41f6-a03d-b1050708ab99\") " pod="openstack/dnsmasq-dns-698758b865-wv7zw" Oct 03 13:49:30 crc kubenswrapper[4861]: I1003 13:49:30.893916 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b558a2b4-cf02-41f6-a03d-b1050708ab99-ovsdbserver-sb\") pod \"dnsmasq-dns-698758b865-wv7zw\" (UID: \"b558a2b4-cf02-41f6-a03d-b1050708ab99\") " 
pod="openstack/dnsmasq-dns-698758b865-wv7zw" Oct 03 13:49:30 crc kubenswrapper[4861]: I1003 13:49:30.894146 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-xsmhv" Oct 03 13:49:30 crc kubenswrapper[4861]: E1003 13:49:30.896906 4861 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Oct 03 13:49:30 crc kubenswrapper[4861]: E1003 13:49:30.896921 4861 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Oct 03 13:49:30 crc kubenswrapper[4861]: E1003 13:49:30.896963 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/532954b7-a9d5-4ddb-87af-b17408a5db8b-etc-swift podName:532954b7-a9d5-4ddb-87af-b17408a5db8b nodeName:}" failed. No retries permitted until 2025-10-03 13:49:32.896946668 +0000 UTC m=+1086.894931715 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/532954b7-a9d5-4ddb-87af-b17408a5db8b-etc-swift") pod "swift-storage-0" (UID: "532954b7-a9d5-4ddb-87af-b17408a5db8b") : configmap "swift-ring-files" not found Oct 03 13:49:30 crc kubenswrapper[4861]: I1003 13:49:30.995701 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b558a2b4-cf02-41f6-a03d-b1050708ab99-ovsdbserver-nb\") pod \"dnsmasq-dns-698758b865-wv7zw\" (UID: \"b558a2b4-cf02-41f6-a03d-b1050708ab99\") " pod="openstack/dnsmasq-dns-698758b865-wv7zw" Oct 03 13:49:30 crc kubenswrapper[4861]: I1003 13:49:30.997743 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tfxh8\" (UniqueName: \"kubernetes.io/projected/b558a2b4-cf02-41f6-a03d-b1050708ab99-kube-api-access-tfxh8\") pod \"dnsmasq-dns-698758b865-wv7zw\" (UID: \"b558a2b4-cf02-41f6-a03d-b1050708ab99\") " pod="openstack/dnsmasq-dns-698758b865-wv7zw" Oct 03 13:49:30 crc kubenswrapper[4861]: I1003 13:49:30.997361 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b558a2b4-cf02-41f6-a03d-b1050708ab99-ovsdbserver-nb\") pod \"dnsmasq-dns-698758b865-wv7zw\" (UID: \"b558a2b4-cf02-41f6-a03d-b1050708ab99\") " pod="openstack/dnsmasq-dns-698758b865-wv7zw" Oct 03 13:49:30 crc kubenswrapper[4861]: I1003 13:49:30.998472 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b558a2b4-cf02-41f6-a03d-b1050708ab99-dns-svc\") pod \"dnsmasq-dns-698758b865-wv7zw\" (UID: \"b558a2b4-cf02-41f6-a03d-b1050708ab99\") " pod="openstack/dnsmasq-dns-698758b865-wv7zw" Oct 03 13:49:30 crc kubenswrapper[4861]: I1003 13:49:30.999176 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b558a2b4-cf02-41f6-a03d-b1050708ab99-dns-svc\") pod \"dnsmasq-dns-698758b865-wv7zw\" (UID: \"b558a2b4-cf02-41f6-a03d-b1050708ab99\") " pod="openstack/dnsmasq-dns-698758b865-wv7zw" Oct 03 13:49:30 crc kubenswrapper[4861]: I1003 13:49:30.999560 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b558a2b4-cf02-41f6-a03d-b1050708ab99-config\") pod \"dnsmasq-dns-698758b865-wv7zw\" (UID: \"b558a2b4-cf02-41f6-a03d-b1050708ab99\") " pod="openstack/dnsmasq-dns-698758b865-wv7zw" Oct 03 13:49:31 crc 
kubenswrapper[4861]: I1003 13:49:31.000538 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b558a2b4-cf02-41f6-a03d-b1050708ab99-ovsdbserver-sb\") pod \"dnsmasq-dns-698758b865-wv7zw\" (UID: \"b558a2b4-cf02-41f6-a03d-b1050708ab99\") " pod="openstack/dnsmasq-dns-698758b865-wv7zw" Oct 03 13:49:31 crc kubenswrapper[4861]: I1003 13:49:31.000454 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b558a2b4-cf02-41f6-a03d-b1050708ab99-config\") pod \"dnsmasq-dns-698758b865-wv7zw\" (UID: \"b558a2b4-cf02-41f6-a03d-b1050708ab99\") " pod="openstack/dnsmasq-dns-698758b865-wv7zw" Oct 03 13:49:31 crc kubenswrapper[4861]: I1003 13:49:31.003173 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b558a2b4-cf02-41f6-a03d-b1050708ab99-ovsdbserver-sb\") pod \"dnsmasq-dns-698758b865-wv7zw\" (UID: \"b558a2b4-cf02-41f6-a03d-b1050708ab99\") " pod="openstack/dnsmasq-dns-698758b865-wv7zw" Oct 03 13:49:31 crc kubenswrapper[4861]: I1003 13:49:31.033573 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tfxh8\" (UniqueName: \"kubernetes.io/projected/b558a2b4-cf02-41f6-a03d-b1050708ab99-kube-api-access-tfxh8\") pod \"dnsmasq-dns-698758b865-wv7zw\" (UID: \"b558a2b4-cf02-41f6-a03d-b1050708ab99\") " pod="openstack/dnsmasq-dns-698758b865-wv7zw" Oct 03 13:49:31 crc kubenswrapper[4861]: I1003 13:49:31.089181 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-wv7zw" Oct 03 13:49:31 crc kubenswrapper[4861]: I1003 13:49:31.303804 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-74f6f696b9-6zxrw"] Oct 03 13:49:31 crc kubenswrapper[4861]: I1003 13:49:31.371994 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-xsmhv"] Oct 03 13:49:31 crc kubenswrapper[4861]: I1003 13:49:31.801487 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-xsmhv" event={"ID":"0636b1a6-6f21-4d14-8a07-014a3e9395c7","Type":"ContainerStarted","Data":"5d550788aade1e96b3a8397fff16c880063682d9775885af0e28111f627d3916"} Oct 03 13:49:31 crc kubenswrapper[4861]: I1003 13:49:31.816827 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7cb5889db5-k9jbl" podUID="f3338337-471a-4440-888e-32f253d64969" containerName="dnsmasq-dns" containerID="cri-o://88d02d608b6b6d17dd1c58218ad32bcd2e5ffbc85d2d5f0257d8f055da0d70fe" gracePeriod=10 Oct 03 13:49:31 crc kubenswrapper[4861]: I1003 13:49:31.817190 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74f6f696b9-6zxrw" event={"ID":"1db49359-5414-446f-8049-45733109066a","Type":"ContainerStarted","Data":"51683cb2d4188783a8b83500de2cfecb5e1589bcd57d2a0901d9ee1643e4e7ca"} Oct 03 13:49:31 crc kubenswrapper[4861]: I1003 13:49:31.818528 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7cb5889db5-k9jbl" Oct 03 13:49:32 crc kubenswrapper[4861]: I1003 13:49:32.218388 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-698758b865-wv7zw"] Oct 03 13:49:32 crc kubenswrapper[4861]: I1003 13:49:32.423761 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7cb5889db5-k9jbl" Oct 03 13:49:32 crc kubenswrapper[4861]: I1003 13:49:32.545387 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hv55t\" (UniqueName: \"kubernetes.io/projected/f3338337-471a-4440-888e-32f253d64969-kube-api-access-hv55t\") pod \"f3338337-471a-4440-888e-32f253d64969\" (UID: \"f3338337-471a-4440-888e-32f253d64969\") " Oct 03 13:49:32 crc kubenswrapper[4861]: I1003 13:49:32.545434 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f3338337-471a-4440-888e-32f253d64969-dns-svc\") pod \"f3338337-471a-4440-888e-32f253d64969\" (UID: \"f3338337-471a-4440-888e-32f253d64969\") " Oct 03 13:49:32 crc kubenswrapper[4861]: I1003 13:49:32.545589 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f3338337-471a-4440-888e-32f253d64969-config\") pod \"f3338337-471a-4440-888e-32f253d64969\" (UID: \"f3338337-471a-4440-888e-32f253d64969\") " Oct 03 13:49:32 crc kubenswrapper[4861]: I1003 13:49:32.556339 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f3338337-471a-4440-888e-32f253d64969-kube-api-access-hv55t" (OuterVolumeSpecName: "kube-api-access-hv55t") pod "f3338337-471a-4440-888e-32f253d64969" (UID: "f3338337-471a-4440-888e-32f253d64969"). InnerVolumeSpecName "kube-api-access-hv55t". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:49:32 crc kubenswrapper[4861]: I1003 13:49:32.592051 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f3338337-471a-4440-888e-32f253d64969-config" (OuterVolumeSpecName: "config") pod "f3338337-471a-4440-888e-32f253d64969" (UID: "f3338337-471a-4440-888e-32f253d64969"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:49:32 crc kubenswrapper[4861]: I1003 13:49:32.602814 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f3338337-471a-4440-888e-32f253d64969-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "f3338337-471a-4440-888e-32f253d64969" (UID: "f3338337-471a-4440-888e-32f253d64969"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:49:32 crc kubenswrapper[4861]: I1003 13:49:32.647459 4861 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f3338337-471a-4440-888e-32f253d64969-config\") on node \"crc\" DevicePath \"\"" Oct 03 13:49:32 crc kubenswrapper[4861]: I1003 13:49:32.647504 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hv55t\" (UniqueName: \"kubernetes.io/projected/f3338337-471a-4440-888e-32f253d64969-kube-api-access-hv55t\") on node \"crc\" DevicePath \"\"" Oct 03 13:49:32 crc kubenswrapper[4861]: I1003 13:49:32.647516 4861 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f3338337-471a-4440-888e-32f253d64969-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 03 13:49:32 crc kubenswrapper[4861]: I1003 13:49:32.851273 4861 generic.go:334] "Generic (PLEG): container finished" podID="1db49359-5414-446f-8049-45733109066a" containerID="11227d7c9deaea5f34f59dbebb0659e02986444e2968e2a7d7f65ca69e3b1e01" exitCode=0 Oct 03 13:49:32 crc kubenswrapper[4861]: I1003 13:49:32.851352 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74f6f696b9-6zxrw" event={"ID":"1db49359-5414-446f-8049-45733109066a","Type":"ContainerDied","Data":"11227d7c9deaea5f34f59dbebb0659e02986444e2968e2a7d7f65ca69e3b1e01"} Oct 03 13:49:32 crc kubenswrapper[4861]: I1003 13:49:32.859772 4861 generic.go:334] "Generic (PLEG): container finished" podID="b558a2b4-cf02-41f6-a03d-b1050708ab99" containerID="9d9d62cbc75e1b27c7e788de68763370e681d62bcdcb5be585d79e8f7d417d4d" exitCode=0 Oct 03 13:49:32 crc kubenswrapper[4861]: I1003 13:49:32.860424 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-wv7zw" event={"ID":"b558a2b4-cf02-41f6-a03d-b1050708ab99","Type":"ContainerDied","Data":"9d9d62cbc75e1b27c7e788de68763370e681d62bcdcb5be585d79e8f7d417d4d"} Oct 03 13:49:32 crc kubenswrapper[4861]: I1003 13:49:32.860471 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-wv7zw" event={"ID":"b558a2b4-cf02-41f6-a03d-b1050708ab99","Type":"ContainerStarted","Data":"298a849d92bacd908c26967c775b1ba09891d19f70012c653747ede513213f02"} Oct 03 13:49:32 crc kubenswrapper[4861]: I1003 13:49:32.869469 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-xsmhv" event={"ID":"0636b1a6-6f21-4d14-8a07-014a3e9395c7","Type":"ContainerStarted","Data":"db60764f0ed7c4d8f67551deed2923ed93d731618989d2a22adf5ca4aab9c97c"} Oct 03 13:49:32 crc kubenswrapper[4861]: I1003 13:49:32.888356 4861 generic.go:334] "Generic (PLEG): container finished" podID="f3338337-471a-4440-888e-32f253d64969" containerID="88d02d608b6b6d17dd1c58218ad32bcd2e5ffbc85d2d5f0257d8f055da0d70fe" exitCode=0 Oct 03 13:49:32 crc kubenswrapper[4861]: I1003 13:49:32.888428 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7cb5889db5-k9jbl" event={"ID":"f3338337-471a-4440-888e-32f253d64969","Type":"ContainerDied","Data":"88d02d608b6b6d17dd1c58218ad32bcd2e5ffbc85d2d5f0257d8f055da0d70fe"} Oct 03 13:49:32 crc kubenswrapper[4861]: I1003 13:49:32.888459 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7cb5889db5-k9jbl" event={"ID":"f3338337-471a-4440-888e-32f253d64969","Type":"ContainerDied","Data":"3878797ed26cd1af86f627455bb931050bf740b1db655e34617fa86eac2799a3"} Oct 03 13:49:32 crc kubenswrapper[4861]: I1003 13:49:32.888501 
4861 scope.go:117] "RemoveContainer" containerID="88d02d608b6b6d17dd1c58218ad32bcd2e5ffbc85d2d5f0257d8f055da0d70fe" Oct 03 13:49:32 crc kubenswrapper[4861]: I1003 13:49:32.889267 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7cb5889db5-k9jbl" Oct 03 13:49:32 crc kubenswrapper[4861]: I1003 13:49:32.927427 4861 scope.go:117] "RemoveContainer" containerID="1eb4ee01c8b73b2ffe4316a784592edb43634988910f276a451736f102962f96" Oct 03 13:49:32 crc kubenswrapper[4861]: I1003 13:49:32.932101 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-metrics-xsmhv" podStartSLOduration=2.932081585 podStartE2EDuration="2.932081585s" podCreationTimestamp="2025-10-03 13:49:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:49:32.93113616 +0000 UTC m=+1086.929121227" watchObservedRunningTime="2025-10-03 13:49:32.932081585 +0000 UTC m=+1086.930066632" Oct 03 13:49:32 crc kubenswrapper[4861]: I1003 13:49:32.952286 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/532954b7-a9d5-4ddb-87af-b17408a5db8b-etc-swift\") pod \"swift-storage-0\" (UID: \"532954b7-a9d5-4ddb-87af-b17408a5db8b\") " pod="openstack/swift-storage-0" Oct 03 13:49:32 crc kubenswrapper[4861]: I1003 13:49:32.954708 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7cb5889db5-k9jbl"] Oct 03 13:49:32 crc kubenswrapper[4861]: E1003 13:49:32.954888 4861 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Oct 03 13:49:32 crc kubenswrapper[4861]: E1003 13:49:32.954902 4861 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Oct 03 13:49:32 crc kubenswrapper[4861]: E1003 13:49:32.954952 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/532954b7-a9d5-4ddb-87af-b17408a5db8b-etc-swift podName:532954b7-a9d5-4ddb-87af-b17408a5db8b nodeName:}" failed. No retries permitted until 2025-10-03 13:49:36.954933856 +0000 UTC m=+1090.952918893 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/532954b7-a9d5-4ddb-87af-b17408a5db8b-etc-swift") pod "swift-storage-0" (UID: "532954b7-a9d5-4ddb-87af-b17408a5db8b") : configmap "swift-ring-files" not found Oct 03 13:49:32 crc kubenswrapper[4861]: I1003 13:49:32.969546 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7cb5889db5-k9jbl"] Oct 03 13:49:33 crc kubenswrapper[4861]: I1003 13:49:33.021196 4861 scope.go:117] "RemoveContainer" containerID="88d02d608b6b6d17dd1c58218ad32bcd2e5ffbc85d2d5f0257d8f055da0d70fe" Oct 03 13:49:33 crc kubenswrapper[4861]: E1003 13:49:33.024084 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"88d02d608b6b6d17dd1c58218ad32bcd2e5ffbc85d2d5f0257d8f055da0d70fe\": container with ID starting with 88d02d608b6b6d17dd1c58218ad32bcd2e5ffbc85d2d5f0257d8f055da0d70fe not found: ID does not exist" containerID="88d02d608b6b6d17dd1c58218ad32bcd2e5ffbc85d2d5f0257d8f055da0d70fe" Oct 03 13:49:33 crc kubenswrapper[4861]: I1003 13:49:33.024135 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"88d02d608b6b6d17dd1c58218ad32bcd2e5ffbc85d2d5f0257d8f055da0d70fe"} err="failed to get container status \"88d02d608b6b6d17dd1c58218ad32bcd2e5ffbc85d2d5f0257d8f055da0d70fe\": rpc error: code = NotFound desc = could not find container \"88d02d608b6b6d17dd1c58218ad32bcd2e5ffbc85d2d5f0257d8f055da0d70fe\": container with ID starting with 88d02d608b6b6d17dd1c58218ad32bcd2e5ffbc85d2d5f0257d8f055da0d70fe not found: ID does not exist" Oct 03 13:49:33 crc kubenswrapper[4861]: I1003 13:49:33.024166 4861 scope.go:117] "RemoveContainer" containerID="1eb4ee01c8b73b2ffe4316a784592edb43634988910f276a451736f102962f96" Oct 03 13:49:33 crc kubenswrapper[4861]: E1003 13:49:33.024602 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1eb4ee01c8b73b2ffe4316a784592edb43634988910f276a451736f102962f96\": container with ID starting with 1eb4ee01c8b73b2ffe4316a784592edb43634988910f276a451736f102962f96 not found: ID does not exist" containerID="1eb4ee01c8b73b2ffe4316a784592edb43634988910f276a451736f102962f96" Oct 03 13:49:33 crc kubenswrapper[4861]: I1003 13:49:33.024627 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1eb4ee01c8b73b2ffe4316a784592edb43634988910f276a451736f102962f96"} err="failed to get container status \"1eb4ee01c8b73b2ffe4316a784592edb43634988910f276a451736f102962f96\": rpc error: code = NotFound desc = could not find container \"1eb4ee01c8b73b2ffe4316a784592edb43634988910f276a451736f102962f96\": container with ID starting with 1eb4ee01c8b73b2ffe4316a784592edb43634988910f276a451736f102962f96 not found: ID does not exist" Oct 03 13:49:33 crc kubenswrapper[4861]: I1003 13:49:33.350810 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-74f6f696b9-6zxrw" Oct 03 13:49:33 crc kubenswrapper[4861]: I1003 13:49:33.368276 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0" Oct 03 13:49:33 crc kubenswrapper[4861]: I1003 13:49:33.438750 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0" Oct 03 13:49:33 crc kubenswrapper[4861]: I1003 13:49:33.462930 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j57n7\" (UniqueName: \"kubernetes.io/projected/1db49359-5414-446f-8049-45733109066a-kube-api-access-j57n7\") pod \"1db49359-5414-446f-8049-45733109066a\" (UID: \"1db49359-5414-446f-8049-45733109066a\") " Oct 03 13:49:33 crc kubenswrapper[4861]: I1003 13:49:33.464354 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1db49359-5414-446f-8049-45733109066a-config\") pod \"1db49359-5414-446f-8049-45733109066a\" (UID: \"1db49359-5414-446f-8049-45733109066a\") " Oct 03 13:49:33 crc kubenswrapper[4861]: I1003 13:49:33.464425 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1db49359-5414-446f-8049-45733109066a-ovsdbserver-nb\") pod \"1db49359-5414-446f-8049-45733109066a\" (UID: \"1db49359-5414-446f-8049-45733109066a\") " Oct 03 13:49:33 crc kubenswrapper[4861]: I1003 13:49:33.470499 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1db49359-5414-446f-8049-45733109066a-dns-svc\") pod \"1db49359-5414-446f-8049-45733109066a\" (UID: \"1db49359-5414-446f-8049-45733109066a\") " Oct 03 13:49:33 crc kubenswrapper[4861]: I1003 13:49:33.532913 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1db49359-5414-446f-8049-45733109066a-kube-api-access-j57n7" (OuterVolumeSpecName: "kube-api-access-j57n7") pod "1db49359-5414-446f-8049-45733109066a" (UID: "1db49359-5414-446f-8049-45733109066a"). InnerVolumeSpecName "kube-api-access-j57n7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:49:33 crc kubenswrapper[4861]: I1003 13:49:33.546820 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1db49359-5414-446f-8049-45733109066a-config" (OuterVolumeSpecName: "config") pod "1db49359-5414-446f-8049-45733109066a" (UID: "1db49359-5414-446f-8049-45733109066a"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:49:33 crc kubenswrapper[4861]: I1003 13:49:33.547027 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1db49359-5414-446f-8049-45733109066a-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "1db49359-5414-446f-8049-45733109066a" (UID: "1db49359-5414-446f-8049-45733109066a"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:49:33 crc kubenswrapper[4861]: I1003 13:49:33.548226 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1db49359-5414-446f-8049-45733109066a-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "1db49359-5414-446f-8049-45733109066a" (UID: "1db49359-5414-446f-8049-45733109066a"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:49:33 crc kubenswrapper[4861]: I1003 13:49:33.575181 4861 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1db49359-5414-446f-8049-45733109066a-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 03 13:49:33 crc kubenswrapper[4861]: I1003 13:49:33.575213 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j57n7\" (UniqueName: \"kubernetes.io/projected/1db49359-5414-446f-8049-45733109066a-kube-api-access-j57n7\") on node \"crc\" DevicePath \"\"" Oct 03 13:49:33 crc kubenswrapper[4861]: I1003 13:49:33.575222 4861 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1db49359-5414-446f-8049-45733109066a-config\") on node \"crc\" DevicePath \"\"" Oct 03 13:49:33 crc kubenswrapper[4861]: I1003 13:49:33.575245 4861 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1db49359-5414-446f-8049-45733109066a-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 03 13:49:33 crc kubenswrapper[4861]: I1003 13:49:33.646926 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"] Oct 03 13:49:33 crc kubenswrapper[4861]: E1003 13:49:33.650387 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1db49359-5414-446f-8049-45733109066a" containerName="init" Oct 03 13:49:33 crc kubenswrapper[4861]: I1003 13:49:33.650421 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="1db49359-5414-446f-8049-45733109066a" containerName="init" Oct 03 13:49:33 crc kubenswrapper[4861]: E1003 13:49:33.650450 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f3338337-471a-4440-888e-32f253d64969" containerName="init" Oct 03 13:49:33 crc kubenswrapper[4861]: I1003 13:49:33.650458 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="f3338337-471a-4440-888e-32f253d64969" containerName="init" Oct 03 13:49:33 crc kubenswrapper[4861]: E1003 13:49:33.650471 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f3338337-471a-4440-888e-32f253d64969" containerName="dnsmasq-dns" Oct 03 13:49:33 crc kubenswrapper[4861]: I1003 13:49:33.650478 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="f3338337-471a-4440-888e-32f253d64969" containerName="dnsmasq-dns" Oct 03 13:49:33 crc kubenswrapper[4861]: I1003 13:49:33.650760 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="1db49359-5414-446f-8049-45733109066a" containerName="init" Oct 03 13:49:33 crc kubenswrapper[4861]: I1003 13:49:33.650788 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="f3338337-471a-4440-888e-32f253d64969" containerName="dnsmasq-dns" Oct 03 13:49:33 crc kubenswrapper[4861]: I1003 13:49:33.654184 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-northd-0" Oct 03 13:49:33 crc kubenswrapper[4861]: I1003 13:49:33.658762 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-7vxnk" Oct 03 13:49:33 crc kubenswrapper[4861]: I1003 13:49:33.658868 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts" Oct 03 13:49:33 crc kubenswrapper[4861]: I1003 13:49:33.658989 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config" Oct 03 13:49:33 crc kubenswrapper[4861]: I1003 13:49:33.659068 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovnnorthd-ovndbs" Oct 03 13:49:33 crc kubenswrapper[4861]: I1003 13:49:33.742392 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Oct 03 13:49:33 crc kubenswrapper[4861]: I1003 13:49:33.787548 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bfjgd\" (UniqueName: \"kubernetes.io/projected/b310d13d-da67-4151-948d-36a86d413a27-kube-api-access-bfjgd\") pod \"ovn-northd-0\" (UID: \"b310d13d-da67-4151-948d-36a86d413a27\") " pod="openstack/ovn-northd-0" Oct 03 13:49:33 crc kubenswrapper[4861]: I1003 13:49:33.787605 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/b310d13d-da67-4151-948d-36a86d413a27-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"b310d13d-da67-4151-948d-36a86d413a27\") " pod="openstack/ovn-northd-0" Oct 03 13:49:33 crc kubenswrapper[4861]: I1003 13:49:33.787690 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b310d13d-da67-4151-948d-36a86d413a27-config\") pod \"ovn-northd-0\" (UID: \"b310d13d-da67-4151-948d-36a86d413a27\") " pod="openstack/ovn-northd-0" Oct 03 13:49:33 crc kubenswrapper[4861]: I1003 13:49:33.787827 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b310d13d-da67-4151-948d-36a86d413a27-scripts\") pod \"ovn-northd-0\" (UID: \"b310d13d-da67-4151-948d-36a86d413a27\") " pod="openstack/ovn-northd-0" Oct 03 13:49:33 crc kubenswrapper[4861]: I1003 13:49:33.787871 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/b310d13d-da67-4151-948d-36a86d413a27-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"b310d13d-da67-4151-948d-36a86d413a27\") " pod="openstack/ovn-northd-0" Oct 03 13:49:33 crc kubenswrapper[4861]: I1003 13:49:33.787908 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/b310d13d-da67-4151-948d-36a86d413a27-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"b310d13d-da67-4151-948d-36a86d413a27\") " pod="openstack/ovn-northd-0" Oct 03 13:49:33 crc kubenswrapper[4861]: I1003 13:49:33.788174 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b310d13d-da67-4151-948d-36a86d413a27-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"b310d13d-da67-4151-948d-36a86d413a27\") " pod="openstack/ovn-northd-0" Oct 03 13:49:33 crc kubenswrapper[4861]: 
I1003 13:49:33.890145 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b310d13d-da67-4151-948d-36a86d413a27-scripts\") pod \"ovn-northd-0\" (UID: \"b310d13d-da67-4151-948d-36a86d413a27\") " pod="openstack/ovn-northd-0" Oct 03 13:49:33 crc kubenswrapper[4861]: I1003 13:49:33.890201 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/b310d13d-da67-4151-948d-36a86d413a27-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"b310d13d-da67-4151-948d-36a86d413a27\") " pod="openstack/ovn-northd-0" Oct 03 13:49:33 crc kubenswrapper[4861]: I1003 13:49:33.890248 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/b310d13d-da67-4151-948d-36a86d413a27-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"b310d13d-da67-4151-948d-36a86d413a27\") " pod="openstack/ovn-northd-0" Oct 03 13:49:33 crc kubenswrapper[4861]: I1003 13:49:33.890861 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/b310d13d-da67-4151-948d-36a86d413a27-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"b310d13d-da67-4151-948d-36a86d413a27\") " pod="openstack/ovn-northd-0" Oct 03 13:49:33 crc kubenswrapper[4861]: I1003 13:49:33.890886 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b310d13d-da67-4151-948d-36a86d413a27-scripts\") pod \"ovn-northd-0\" (UID: \"b310d13d-da67-4151-948d-36a86d413a27\") " pod="openstack/ovn-northd-0" Oct 03 13:49:33 crc kubenswrapper[4861]: I1003 13:49:33.890908 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b310d13d-da67-4151-948d-36a86d413a27-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"b310d13d-da67-4151-948d-36a86d413a27\") " pod="openstack/ovn-northd-0" Oct 03 13:49:33 crc kubenswrapper[4861]: I1003 13:49:33.891007 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bfjgd\" (UniqueName: \"kubernetes.io/projected/b310d13d-da67-4151-948d-36a86d413a27-kube-api-access-bfjgd\") pod \"ovn-northd-0\" (UID: \"b310d13d-da67-4151-948d-36a86d413a27\") " pod="openstack/ovn-northd-0" Oct 03 13:49:33 crc kubenswrapper[4861]: I1003 13:49:33.891035 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/b310d13d-da67-4151-948d-36a86d413a27-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"b310d13d-da67-4151-948d-36a86d413a27\") " pod="openstack/ovn-northd-0" Oct 03 13:49:33 crc kubenswrapper[4861]: I1003 13:49:33.893739 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b310d13d-da67-4151-948d-36a86d413a27-config\") pod \"ovn-northd-0\" (UID: \"b310d13d-da67-4151-948d-36a86d413a27\") " pod="openstack/ovn-northd-0" Oct 03 13:49:33 crc kubenswrapper[4861]: I1003 13:49:33.894088 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/b310d13d-da67-4151-948d-36a86d413a27-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"b310d13d-da67-4151-948d-36a86d413a27\") " pod="openstack/ovn-northd-0" Oct 03 13:49:33 crc kubenswrapper[4861]: I1003 
Oct 03 13:49:33 crc kubenswrapper[4861]: I1003 13:49:33.895829 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/b310d13d-da67-4151-948d-36a86d413a27-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"b310d13d-da67-4151-948d-36a86d413a27\") " pod="openstack/ovn-northd-0"
Oct 03 13:49:33 crc kubenswrapper[4861]: I1003 13:49:33.901247 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-74f6f696b9-6zxrw"
Oct 03 13:49:33 crc kubenswrapper[4861]: I1003 13:49:33.901223 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74f6f696b9-6zxrw" event={"ID":"1db49359-5414-446f-8049-45733109066a","Type":"ContainerDied","Data":"51683cb2d4188783a8b83500de2cfecb5e1589bcd57d2a0901d9ee1643e4e7ca"}
Oct 03 13:49:33 crc kubenswrapper[4861]: I1003 13:49:33.901858 4861 scope.go:117] "RemoveContainer" containerID="11227d7c9deaea5f34f59dbebb0659e02986444e2968e2a7d7f65ca69e3b1e01"
Oct 03 13:49:33 crc kubenswrapper[4861]: I1003 13:49:33.906361 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b310d13d-da67-4151-948d-36a86d413a27-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"b310d13d-da67-4151-948d-36a86d413a27\") " pod="openstack/ovn-northd-0"
Oct 03 13:49:33 crc kubenswrapper[4861]: I1003 13:49:33.908385 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-wv7zw" event={"ID":"b558a2b4-cf02-41f6-a03d-b1050708ab99","Type":"ContainerStarted","Data":"d5ed3b0cabd0b5fec90bc5d63dad187d2a1339d634d276984118f9748bc0047b"}
Oct 03 13:49:33 crc kubenswrapper[4861]: I1003 13:49:33.908460 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-698758b865-wv7zw"
Oct 03 13:49:33 crc kubenswrapper[4861]: I1003 13:49:33.909688 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bfjgd\" (UniqueName: \"kubernetes.io/projected/b310d13d-da67-4151-948d-36a86d413a27-kube-api-access-bfjgd\") pod \"ovn-northd-0\" (UID: \"b310d13d-da67-4151-948d-36a86d413a27\") " pod="openstack/ovn-northd-0"
Oct 03 13:49:33 crc kubenswrapper[4861]: I1003 13:49:33.927995 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-698758b865-wv7zw" podStartSLOduration=3.927974678 podStartE2EDuration="3.927974678s" podCreationTimestamp="2025-10-03 13:49:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:49:33.926296304 +0000 UTC m=+1087.924281361" watchObservedRunningTime="2025-10-03 13:49:33.927974678 +0000 UTC m=+1087.925959735"
Oct 03 13:49:33 crc kubenswrapper[4861]: I1003 13:49:33.979505 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0"
Oct 03 13:49:33 crc kubenswrapper[4861]: I1003 13:49:33.992509 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-74f6f696b9-6zxrw"]
Oct 03 13:49:33 crc kubenswrapper[4861]: I1003 13:49:33.994924 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-74f6f696b9-6zxrw"]
Oct 03 13:49:34 crc kubenswrapper[4861]: I1003 13:49:34.690291 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1db49359-5414-446f-8049-45733109066a" path="/var/lib/kubelet/pods/1db49359-5414-446f-8049-45733109066a/volumes"
Oct 03 13:49:34 crc kubenswrapper[4861]: I1003 13:49:34.690789 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f3338337-471a-4440-888e-32f253d64969" path="/var/lib/kubelet/pods/f3338337-471a-4440-888e-32f253d64969/volumes"
Oct 03 13:49:34 crc kubenswrapper[4861]: I1003 13:49:34.918827 4861 generic.go:334] "Generic (PLEG): container finished" podID="b9bf9ee1-8038-4578-b10d-390a82c11290" containerID="9f9d39c8bf5f4551c44ca852498249659ad6b84b7d2f70a3afc57c8ccc19b665" exitCode=0
Oct 03 13:49:34 crc kubenswrapper[4861]: I1003 13:49:34.919091 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"b9bf9ee1-8038-4578-b10d-390a82c11290","Type":"ContainerDied","Data":"9f9d39c8bf5f4551c44ca852498249659ad6b84b7d2f70a3afc57c8ccc19b665"}
Oct 03 13:49:34 crc kubenswrapper[4861]: I1003 13:49:34.924881 4861 generic.go:334] "Generic (PLEG): container finished" podID="d537dadb-d98c-4ac3-ae54-fc0a9397d7d7" containerID="a457e9d6f265baf52dcad5b846aa53b26ec38f6a5d91ffdde9329266fe7937eb" exitCode=0
Oct 03 13:49:34 crc kubenswrapper[4861]: I1003 13:49:34.925120 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"d537dadb-d98c-4ac3-ae54-fc0a9397d7d7","Type":"ContainerDied","Data":"a457e9d6f265baf52dcad5b846aa53b26ec38f6a5d91ffdde9329266fe7937eb"}
Oct 03 13:49:35 crc kubenswrapper[4861]: I1003 13:49:35.192924 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0"
Oct 03 13:49:35 crc kubenswrapper[4861]: I1003 13:49:35.192980 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0"
Oct 03 13:49:35 crc kubenswrapper[4861]: I1003 13:49:35.246270 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0"
Oct 03 13:49:35 crc kubenswrapper[4861]: I1003 13:49:35.311417 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0"
Oct 03 13:49:35 crc kubenswrapper[4861]: I1003 13:49:35.311471 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0"
Oct 03 13:49:35 crc kubenswrapper[4861]: I1003 13:49:35.386285 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0"
Oct 03 13:49:35 crc kubenswrapper[4861]: I1003 13:49:35.988435 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0"
Oct 03 13:49:35 crc kubenswrapper[4861]: I1003 13:49:35.999467 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0"
Oct 03 13:49:37 crc kubenswrapper[4861]: I1003 13:49:37.106432 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/532954b7-a9d5-4ddb-87af-b17408a5db8b-etc-swift\") pod \"swift-storage-0\" (UID: \"532954b7-a9d5-4ddb-87af-b17408a5db8b\") " pod="openstack/swift-storage-0"
(UniqueName: \"kubernetes.io/projected/532954b7-a9d5-4ddb-87af-b17408a5db8b-etc-swift\") pod \"swift-storage-0\" (UID: \"532954b7-a9d5-4ddb-87af-b17408a5db8b\") " pod="openstack/swift-storage-0" Oct 03 13:49:37 crc kubenswrapper[4861]: E1003 13:49:37.107740 4861 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Oct 03 13:49:37 crc kubenswrapper[4861]: E1003 13:49:37.107763 4861 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Oct 03 13:49:37 crc kubenswrapper[4861]: E1003 13:49:37.110424 4861 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/532954b7-a9d5-4ddb-87af-b17408a5db8b-etc-swift podName:532954b7-a9d5-4ddb-87af-b17408a5db8b nodeName:}" failed. No retries permitted until 2025-10-03 13:49:45.11039539 +0000 UTC m=+1099.108380447 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/532954b7-a9d5-4ddb-87af-b17408a5db8b-etc-swift") pod "swift-storage-0" (UID: "532954b7-a9d5-4ddb-87af-b17408a5db8b") : configmap "swift-ring-files" not found Oct 03 13:49:37 crc kubenswrapper[4861]: I1003 13:49:37.430318 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Oct 03 13:49:37 crc kubenswrapper[4861]: I1003 13:49:37.992911 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"b9bf9ee1-8038-4578-b10d-390a82c11290","Type":"ContainerStarted","Data":"8dfa69721269065dad4859bc787838ef6c05081e9d3dfde8a79bec4d85aa1bb8"} Oct 03 13:49:37 crc kubenswrapper[4861]: I1003 13:49:37.993447 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Oct 03 13:49:37 crc kubenswrapper[4861]: I1003 13:49:37.995580 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"b310d13d-da67-4151-948d-36a86d413a27","Type":"ContainerStarted","Data":"ed4ed7ea5576962edcd9199cd9d8cfde43832322845411b1c69be25b40302c84"} Oct 03 13:49:37 crc kubenswrapper[4861]: I1003 13:49:37.997081 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-s5lt7" event={"ID":"971f0946-1828-4512-9b7d-2bafc5a78ef3","Type":"ContainerStarted","Data":"5374da70ce7e7d67d6f3134fcf32ce9f5ccdfd4c9d2ac7a75a50ae86c633d53a"} Oct 03 13:49:37 crc kubenswrapper[4861]: I1003 13:49:37.998737 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"d537dadb-d98c-4ac3-ae54-fc0a9397d7d7","Type":"ContainerStarted","Data":"9d80e2d6952baf608f74f963d8e2bda4ba73dfa74ab0e13c642bc8e6396c5daf"} Oct 03 13:49:37 crc kubenswrapper[4861]: I1003 13:49:37.999178 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:49:38 crc kubenswrapper[4861]: I1003 13:49:38.001118 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"f6bf9658-85f4-4e0b-9aa6-cf672e04a858","Type":"ContainerStarted","Data":"0a82d11036b5d4634c16c5cf9c05f099eb6047fbc529d01b940c17b558ee1783"} Oct 03 13:49:38 crc kubenswrapper[4861]: I1003 13:49:38.001658 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Oct 03 13:49:38 crc kubenswrapper[4861]: I1003 13:49:38.029125 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack/rabbitmq-server-0" podStartSLOduration=40.623928584 podStartE2EDuration="1m7.029103165s" podCreationTimestamp="2025-10-03 13:48:31 +0000 UTC" firstStartedPulling="2025-10-03 13:48:33.595590295 +0000 UTC m=+1027.593575342" lastFinishedPulling="2025-10-03 13:49:00.000764866 +0000 UTC m=+1053.998749923" observedRunningTime="2025-10-03 13:49:38.019948394 +0000 UTC m=+1092.017933451" watchObservedRunningTime="2025-10-03 13:49:38.029103165 +0000 UTC m=+1092.027088212" Oct 03 13:49:38 crc kubenswrapper[4861]: I1003 13:49:38.047688 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-ring-rebalance-s5lt7" podStartSLOduration=2.763393568 podStartE2EDuration="9.047672324s" podCreationTimestamp="2025-10-03 13:49:29 +0000 UTC" firstStartedPulling="2025-10-03 13:49:30.684061279 +0000 UTC m=+1084.682046326" lastFinishedPulling="2025-10-03 13:49:36.968340035 +0000 UTC m=+1090.966325082" observedRunningTime="2025-10-03 13:49:38.042148018 +0000 UTC m=+1092.040133065" watchObservedRunningTime="2025-10-03 13:49:38.047672324 +0000 UTC m=+1092.045657371" Oct 03 13:49:38 crc kubenswrapper[4861]: I1003 13:49:38.075160 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=39.78039432 podStartE2EDuration="1m8.075139355s" podCreationTimestamp="2025-10-03 13:48:30 +0000 UTC" firstStartedPulling="2025-10-03 13:48:32.976571129 +0000 UTC m=+1026.974556176" lastFinishedPulling="2025-10-03 13:49:01.271316154 +0000 UTC m=+1055.269301211" observedRunningTime="2025-10-03 13:49:38.073755179 +0000 UTC m=+1092.071740236" watchObservedRunningTime="2025-10-03 13:49:38.075139355 +0000 UTC m=+1092.073124402" Oct 03 13:49:39 crc kubenswrapper[4861]: I1003 13:49:39.032096 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"b310d13d-da67-4151-948d-36a86d413a27","Type":"ContainerStarted","Data":"05b37785665267b5b1e607513e82c39cfe25853a90ba435a4d2a93810aa8365a"} Oct 03 13:49:40 crc kubenswrapper[4861]: I1003 13:49:40.038944 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"b310d13d-da67-4151-948d-36a86d413a27","Type":"ContainerStarted","Data":"df06684f3b7a75c55bb783873a43dd36538bbc581f87e7a3f72373a5da081dd5"} Oct 03 13:49:40 crc kubenswrapper[4861]: I1003 13:49:40.039359 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0" Oct 03 13:49:40 crc kubenswrapper[4861]: I1003 13:49:40.066438 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=5.745285691 podStartE2EDuration="7.06641849s" podCreationTimestamp="2025-10-03 13:49:33 +0000 UTC" firstStartedPulling="2025-10-03 13:49:37.474308763 +0000 UTC m=+1091.472293810" lastFinishedPulling="2025-10-03 13:49:38.795441562 +0000 UTC m=+1092.793426609" observedRunningTime="2025-10-03 13:49:40.062869897 +0000 UTC m=+1094.060854944" watchObservedRunningTime="2025-10-03 13:49:40.06641849 +0000 UTC m=+1094.064403537" Oct 03 13:49:40 crc kubenswrapper[4861]: I1003 13:49:40.069881 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=28.122772475 podStartE2EDuration="1m3.069870041s" podCreationTimestamp="2025-10-03 13:48:37 +0000 UTC" firstStartedPulling="2025-10-03 13:49:02.097750361 +0000 UTC m=+1056.095735408" lastFinishedPulling="2025-10-03 13:49:37.044847927 +0000 UTC m=+1091.042832974" 
observedRunningTime="2025-10-03 13:49:38.101214842 +0000 UTC m=+1092.099199889" watchObservedRunningTime="2025-10-03 13:49:40.069870041 +0000 UTC m=+1094.067855088" Oct 03 13:49:41 crc kubenswrapper[4861]: I1003 13:49:41.090406 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-698758b865-wv7zw" Oct 03 13:49:41 crc kubenswrapper[4861]: I1003 13:49:41.152693 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-2ntbd"] Oct 03 13:49:41 crc kubenswrapper[4861]: I1003 13:49:41.153134 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-57d769cc4f-2ntbd" podUID="23e02e1d-0ff1-4ce2-a299-5802ec3177cd" containerName="dnsmasq-dns" containerID="cri-o://8913d8a60dcbfee14531956bc0a72e73205cb6a335e477869e28138bc4b72fc2" gracePeriod=10 Oct 03 13:49:41 crc kubenswrapper[4861]: I1003 13:49:41.889500 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-2ntbd" Oct 03 13:49:41 crc kubenswrapper[4861]: I1003 13:49:41.992511 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/23e02e1d-0ff1-4ce2-a299-5802ec3177cd-dns-svc\") pod \"23e02e1d-0ff1-4ce2-a299-5802ec3177cd\" (UID: \"23e02e1d-0ff1-4ce2-a299-5802ec3177cd\") " Oct 03 13:49:41 crc kubenswrapper[4861]: I1003 13:49:41.992587 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/23e02e1d-0ff1-4ce2-a299-5802ec3177cd-config\") pod \"23e02e1d-0ff1-4ce2-a299-5802ec3177cd\" (UID: \"23e02e1d-0ff1-4ce2-a299-5802ec3177cd\") " Oct 03 13:49:41 crc kubenswrapper[4861]: I1003 13:49:41.993683 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wgzjh\" (UniqueName: \"kubernetes.io/projected/23e02e1d-0ff1-4ce2-a299-5802ec3177cd-kube-api-access-wgzjh\") pod \"23e02e1d-0ff1-4ce2-a299-5802ec3177cd\" (UID: \"23e02e1d-0ff1-4ce2-a299-5802ec3177cd\") " Oct 03 13:49:42 crc kubenswrapper[4861]: I1003 13:49:42.018332 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/23e02e1d-0ff1-4ce2-a299-5802ec3177cd-kube-api-access-wgzjh" (OuterVolumeSpecName: "kube-api-access-wgzjh") pod "23e02e1d-0ff1-4ce2-a299-5802ec3177cd" (UID: "23e02e1d-0ff1-4ce2-a299-5802ec3177cd"). InnerVolumeSpecName "kube-api-access-wgzjh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:49:42 crc kubenswrapper[4861]: I1003 13:49:42.036291 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/23e02e1d-0ff1-4ce2-a299-5802ec3177cd-config" (OuterVolumeSpecName: "config") pod "23e02e1d-0ff1-4ce2-a299-5802ec3177cd" (UID: "23e02e1d-0ff1-4ce2-a299-5802ec3177cd"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:49:42 crc kubenswrapper[4861]: I1003 13:49:42.060168 4861 generic.go:334] "Generic (PLEG): container finished" podID="23e02e1d-0ff1-4ce2-a299-5802ec3177cd" containerID="8913d8a60dcbfee14531956bc0a72e73205cb6a335e477869e28138bc4b72fc2" exitCode=0 Oct 03 13:49:42 crc kubenswrapper[4861]: I1003 13:49:42.060213 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-2ntbd" event={"ID":"23e02e1d-0ff1-4ce2-a299-5802ec3177cd","Type":"ContainerDied","Data":"8913d8a60dcbfee14531956bc0a72e73205cb6a335e477869e28138bc4b72fc2"} Oct 03 13:49:42 crc kubenswrapper[4861]: I1003 13:49:42.060257 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-2ntbd" event={"ID":"23e02e1d-0ff1-4ce2-a299-5802ec3177cd","Type":"ContainerDied","Data":"4da56b6dc594357510dfa410227eddaedac1ece1a8db396d1698d6f54fa1e160"} Oct 03 13:49:42 crc kubenswrapper[4861]: I1003 13:49:42.060282 4861 scope.go:117] "RemoveContainer" containerID="8913d8a60dcbfee14531956bc0a72e73205cb6a335e477869e28138bc4b72fc2" Oct 03 13:49:42 crc kubenswrapper[4861]: I1003 13:49:42.060409 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-2ntbd" Oct 03 13:49:42 crc kubenswrapper[4861]: I1003 13:49:42.074388 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/23e02e1d-0ff1-4ce2-a299-5802ec3177cd-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "23e02e1d-0ff1-4ce2-a299-5802ec3177cd" (UID: "23e02e1d-0ff1-4ce2-a299-5802ec3177cd"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:49:42 crc kubenswrapper[4861]: I1003 13:49:42.096465 4861 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/23e02e1d-0ff1-4ce2-a299-5802ec3177cd-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 03 13:49:42 crc kubenswrapper[4861]: I1003 13:49:42.096507 4861 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/23e02e1d-0ff1-4ce2-a299-5802ec3177cd-config\") on node \"crc\" DevicePath \"\"" Oct 03 13:49:42 crc kubenswrapper[4861]: I1003 13:49:42.096523 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wgzjh\" (UniqueName: \"kubernetes.io/projected/23e02e1d-0ff1-4ce2-a299-5802ec3177cd-kube-api-access-wgzjh\") on node \"crc\" DevicePath \"\"" Oct 03 13:49:42 crc kubenswrapper[4861]: I1003 13:49:42.104209 4861 scope.go:117] "RemoveContainer" containerID="47ae663a3936d48c3b110af4e2b830fdcb6c1fadd37d9fba3a95c3b0015f2024" Oct 03 13:49:42 crc kubenswrapper[4861]: I1003 13:49:42.129316 4861 scope.go:117] "RemoveContainer" containerID="8913d8a60dcbfee14531956bc0a72e73205cb6a335e477869e28138bc4b72fc2" Oct 03 13:49:42 crc kubenswrapper[4861]: E1003 13:49:42.129959 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8913d8a60dcbfee14531956bc0a72e73205cb6a335e477869e28138bc4b72fc2\": container with ID starting with 8913d8a60dcbfee14531956bc0a72e73205cb6a335e477869e28138bc4b72fc2 not found: ID does not exist" containerID="8913d8a60dcbfee14531956bc0a72e73205cb6a335e477869e28138bc4b72fc2" Oct 03 13:49:42 crc kubenswrapper[4861]: I1003 13:49:42.130001 4861 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"8913d8a60dcbfee14531956bc0a72e73205cb6a335e477869e28138bc4b72fc2"} err="failed to get container status \"8913d8a60dcbfee14531956bc0a72e73205cb6a335e477869e28138bc4b72fc2\": rpc error: code = NotFound desc = could not find container \"8913d8a60dcbfee14531956bc0a72e73205cb6a335e477869e28138bc4b72fc2\": container with ID starting with 8913d8a60dcbfee14531956bc0a72e73205cb6a335e477869e28138bc4b72fc2 not found: ID does not exist" Oct 03 13:49:42 crc kubenswrapper[4861]: I1003 13:49:42.130031 4861 scope.go:117] "RemoveContainer" containerID="47ae663a3936d48c3b110af4e2b830fdcb6c1fadd37d9fba3a95c3b0015f2024" Oct 03 13:49:42 crc kubenswrapper[4861]: E1003 13:49:42.131476 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"47ae663a3936d48c3b110af4e2b830fdcb6c1fadd37d9fba3a95c3b0015f2024\": container with ID starting with 47ae663a3936d48c3b110af4e2b830fdcb6c1fadd37d9fba3a95c3b0015f2024 not found: ID does not exist" containerID="47ae663a3936d48c3b110af4e2b830fdcb6c1fadd37d9fba3a95c3b0015f2024" Oct 03 13:49:42 crc kubenswrapper[4861]: I1003 13:49:42.131515 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"47ae663a3936d48c3b110af4e2b830fdcb6c1fadd37d9fba3a95c3b0015f2024"} err="failed to get container status \"47ae663a3936d48c3b110af4e2b830fdcb6c1fadd37d9fba3a95c3b0015f2024\": rpc error: code = NotFound desc = could not find container \"47ae663a3936d48c3b110af4e2b830fdcb6c1fadd37d9fba3a95c3b0015f2024\": container with ID starting with 47ae663a3936d48c3b110af4e2b830fdcb6c1fadd37d9fba3a95c3b0015f2024 not found: ID does not exist" Oct 03 13:49:42 crc kubenswrapper[4861]: I1003 13:49:42.389128 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-2ntbd"] Oct 03 13:49:42 crc kubenswrapper[4861]: I1003 13:49:42.395878 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-2ntbd"] Oct 03 13:49:42 crc kubenswrapper[4861]: I1003 13:49:42.691343 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="23e02e1d-0ff1-4ce2-a299-5802ec3177cd" path="/var/lib/kubelet/pods/23e02e1d-0ff1-4ce2-a299-5802ec3177cd/volumes" Oct 03 13:49:45 crc kubenswrapper[4861]: I1003 13:49:45.081961 4861 generic.go:334] "Generic (PLEG): container finished" podID="971f0946-1828-4512-9b7d-2bafc5a78ef3" containerID="5374da70ce7e7d67d6f3134fcf32ce9f5ccdfd4c9d2ac7a75a50ae86c633d53a" exitCode=0 Oct 03 13:49:45 crc kubenswrapper[4861]: I1003 13:49:45.082022 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-s5lt7" event={"ID":"971f0946-1828-4512-9b7d-2bafc5a78ef3","Type":"ContainerDied","Data":"5374da70ce7e7d67d6f3134fcf32ce9f5ccdfd4c9d2ac7a75a50ae86c633d53a"} Oct 03 13:49:45 crc kubenswrapper[4861]: I1003 13:49:45.140337 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/532954b7-a9d5-4ddb-87af-b17408a5db8b-etc-swift\") pod \"swift-storage-0\" (UID: \"532954b7-a9d5-4ddb-87af-b17408a5db8b\") " pod="openstack/swift-storage-0" Oct 03 13:49:45 crc kubenswrapper[4861]: I1003 13:49:45.158918 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/532954b7-a9d5-4ddb-87af-b17408a5db8b-etc-swift\") pod \"swift-storage-0\" (UID: \"532954b7-a9d5-4ddb-87af-b17408a5db8b\") " pod="openstack/swift-storage-0" Oct 03 
13:49:45 crc kubenswrapper[4861]: I1003 13:49:45.161121 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0" Oct 03 13:49:45 crc kubenswrapper[4861]: I1003 13:49:45.570676 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-6thsd"] Oct 03 13:49:45 crc kubenswrapper[4861]: E1003 13:49:45.571411 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="23e02e1d-0ff1-4ce2-a299-5802ec3177cd" containerName="init" Oct 03 13:49:45 crc kubenswrapper[4861]: I1003 13:49:45.571435 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="23e02e1d-0ff1-4ce2-a299-5802ec3177cd" containerName="init" Oct 03 13:49:45 crc kubenswrapper[4861]: E1003 13:49:45.571459 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="23e02e1d-0ff1-4ce2-a299-5802ec3177cd" containerName="dnsmasq-dns" Oct 03 13:49:45 crc kubenswrapper[4861]: I1003 13:49:45.571468 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="23e02e1d-0ff1-4ce2-a299-5802ec3177cd" containerName="dnsmasq-dns" Oct 03 13:49:45 crc kubenswrapper[4861]: I1003 13:49:45.571675 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="23e02e1d-0ff1-4ce2-a299-5802ec3177cd" containerName="dnsmasq-dns" Oct 03 13:49:45 crc kubenswrapper[4861]: I1003 13:49:45.572385 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-6thsd" Oct 03 13:49:45 crc kubenswrapper[4861]: I1003 13:49:45.583457 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-6thsd"] Oct 03 13:49:45 crc kubenswrapper[4861]: I1003 13:49:45.653051 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gjpjm\" (UniqueName: \"kubernetes.io/projected/88033eb7-5000-426e-9cae-8352354b0ca5-kube-api-access-gjpjm\") pod \"keystone-db-create-6thsd\" (UID: \"88033eb7-5000-426e-9cae-8352354b0ca5\") " pod="openstack/keystone-db-create-6thsd" Oct 03 13:49:45 crc kubenswrapper[4861]: I1003 13:49:45.753550 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Oct 03 13:49:45 crc kubenswrapper[4861]: I1003 13:49:45.755287 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gjpjm\" (UniqueName: \"kubernetes.io/projected/88033eb7-5000-426e-9cae-8352354b0ca5-kube-api-access-gjpjm\") pod \"keystone-db-create-6thsd\" (UID: \"88033eb7-5000-426e-9cae-8352354b0ca5\") " pod="openstack/keystone-db-create-6thsd" Oct 03 13:49:45 crc kubenswrapper[4861]: W1003 13:49:45.762307 4861 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod532954b7_a9d5_4ddb_87af_b17408a5db8b.slice/crio-b5bd89bd86a261f4d91e3f0e97eef9ab1030812d3fb80e598d0ab28ed12e0642 WatchSource:0}: Error finding container b5bd89bd86a261f4d91e3f0e97eef9ab1030812d3fb80e598d0ab28ed12e0642: Status 404 returned error can't find the container with id b5bd89bd86a261f4d91e3f0e97eef9ab1030812d3fb80e598d0ab28ed12e0642 Oct 03 13:49:45 crc kubenswrapper[4861]: I1003 13:49:45.807354 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gjpjm\" (UniqueName: \"kubernetes.io/projected/88033eb7-5000-426e-9cae-8352354b0ca5-kube-api-access-gjpjm\") pod \"keystone-db-create-6thsd\" (UID: \"88033eb7-5000-426e-9cae-8352354b0ca5\") " pod="openstack/keystone-db-create-6thsd" Oct 03 13:49:45 crc kubenswrapper[4861]: 
Oct 03 13:49:45 crc kubenswrapper[4861]: I1003 13:49:45.813625 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-clwv7"
Oct 03 13:49:45 crc kubenswrapper[4861]: I1003 13:49:45.823932 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-clwv7"]
Oct 03 13:49:45 crc kubenswrapper[4861]: I1003 13:49:45.858308 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8nsrj\" (UniqueName: \"kubernetes.io/projected/8eae347e-a22b-43bb-8fdb-f7bfb1022fdc-kube-api-access-8nsrj\") pod \"placement-db-create-clwv7\" (UID: \"8eae347e-a22b-43bb-8fdb-f7bfb1022fdc\") " pod="openstack/placement-db-create-clwv7"
Oct 03 13:49:45 crc kubenswrapper[4861]: I1003 13:49:45.897550 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-6thsd"
Oct 03 13:49:45 crc kubenswrapper[4861]: I1003 13:49:45.959730 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8nsrj\" (UniqueName: \"kubernetes.io/projected/8eae347e-a22b-43bb-8fdb-f7bfb1022fdc-kube-api-access-8nsrj\") pod \"placement-db-create-clwv7\" (UID: \"8eae347e-a22b-43bb-8fdb-f7bfb1022fdc\") " pod="openstack/placement-db-create-clwv7"
Oct 03 13:49:45 crc kubenswrapper[4861]: I1003 13:49:45.995361 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8nsrj\" (UniqueName: \"kubernetes.io/projected/8eae347e-a22b-43bb-8fdb-f7bfb1022fdc-kube-api-access-8nsrj\") pod \"placement-db-create-clwv7\" (UID: \"8eae347e-a22b-43bb-8fdb-f7bfb1022fdc\") " pod="openstack/placement-db-create-clwv7"
Oct 03 13:49:46 crc kubenswrapper[4861]: I1003 13:49:46.097112 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"532954b7-a9d5-4ddb-87af-b17408a5db8b","Type":"ContainerStarted","Data":"b5bd89bd86a261f4d91e3f0e97eef9ab1030812d3fb80e598d0ab28ed12e0642"}
Oct 03 13:49:46 crc kubenswrapper[4861]: I1003 13:49:46.105520 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-6jwg6"]
Oct 03 13:49:46 crc kubenswrapper[4861]: I1003 13:49:46.107168 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-6jwg6"
Oct 03 13:49:46 crc kubenswrapper[4861]: I1003 13:49:46.130763 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-6jwg6"]
Oct 03 13:49:46 crc kubenswrapper[4861]: I1003 13:49:46.164057 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kzkpj\" (UniqueName: \"kubernetes.io/projected/5bda74fa-3e6f-4a03-aa42-b3e1ea436936-kube-api-access-kzkpj\") pod \"glance-db-create-6jwg6\" (UID: \"5bda74fa-3e6f-4a03-aa42-b3e1ea436936\") " pod="openstack/glance-db-create-6jwg6"
Oct 03 13:49:46 crc kubenswrapper[4861]: I1003 13:49:46.168591 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-clwv7"
Oct 03 13:49:46 crc kubenswrapper[4861]: I1003 13:49:46.265498 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kzkpj\" (UniqueName: \"kubernetes.io/projected/5bda74fa-3e6f-4a03-aa42-b3e1ea436936-kube-api-access-kzkpj\") pod \"glance-db-create-6jwg6\" (UID: \"5bda74fa-3e6f-4a03-aa42-b3e1ea436936\") " pod="openstack/glance-db-create-6jwg6"
Oct 03 13:49:46 crc kubenswrapper[4861]: I1003 13:49:46.315888 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kzkpj\" (UniqueName: \"kubernetes.io/projected/5bda74fa-3e6f-4a03-aa42-b3e1ea436936-kube-api-access-kzkpj\") pod \"glance-db-create-6jwg6\" (UID: \"5bda74fa-3e6f-4a03-aa42-b3e1ea436936\") " pod="openstack/glance-db-create-6jwg6"
Oct 03 13:49:46 crc kubenswrapper[4861]: I1003 13:49:46.376156 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-6thsd"]
Oct 03 13:49:46 crc kubenswrapper[4861]: W1003 13:49:46.405475 4861 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod88033eb7_5000_426e_9cae_8352354b0ca5.slice/crio-7dba7ad2185f155ff1aaa3d58619a811c7643919abbfe19cbf6f5a6f660996e6 WatchSource:0}: Error finding container 7dba7ad2185f155ff1aaa3d58619a811c7643919abbfe19cbf6f5a6f660996e6: Status 404 returned error can't find the container with id 7dba7ad2185f155ff1aaa3d58619a811c7643919abbfe19cbf6f5a6f660996e6
Oct 03 13:49:46 crc kubenswrapper[4861]: I1003 13:49:46.435608 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-6jwg6"
Oct 03 13:49:46 crc kubenswrapper[4861]: I1003 13:49:46.514661 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-s5lt7"
Need to start a new one" pod="openstack/swift-ring-rebalance-s5lt7" Oct 03 13:49:46 crc kubenswrapper[4861]: I1003 13:49:46.581363 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/971f0946-1828-4512-9b7d-2bafc5a78ef3-scripts\") pod \"971f0946-1828-4512-9b7d-2bafc5a78ef3\" (UID: \"971f0946-1828-4512-9b7d-2bafc5a78ef3\") " Oct 03 13:49:46 crc kubenswrapper[4861]: I1003 13:49:46.581427 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/971f0946-1828-4512-9b7d-2bafc5a78ef3-swiftconf\") pod \"971f0946-1828-4512-9b7d-2bafc5a78ef3\" (UID: \"971f0946-1828-4512-9b7d-2bafc5a78ef3\") " Oct 03 13:49:46 crc kubenswrapper[4861]: I1003 13:49:46.581453 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/971f0946-1828-4512-9b7d-2bafc5a78ef3-combined-ca-bundle\") pod \"971f0946-1828-4512-9b7d-2bafc5a78ef3\" (UID: \"971f0946-1828-4512-9b7d-2bafc5a78ef3\") " Oct 03 13:49:46 crc kubenswrapper[4861]: I1003 13:49:46.581477 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g79sl\" (UniqueName: \"kubernetes.io/projected/971f0946-1828-4512-9b7d-2bafc5a78ef3-kube-api-access-g79sl\") pod \"971f0946-1828-4512-9b7d-2bafc5a78ef3\" (UID: \"971f0946-1828-4512-9b7d-2bafc5a78ef3\") " Oct 03 13:49:46 crc kubenswrapper[4861]: I1003 13:49:46.581502 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/971f0946-1828-4512-9b7d-2bafc5a78ef3-dispersionconf\") pod \"971f0946-1828-4512-9b7d-2bafc5a78ef3\" (UID: \"971f0946-1828-4512-9b7d-2bafc5a78ef3\") " Oct 03 13:49:46 crc kubenswrapper[4861]: I1003 13:49:46.581903 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/971f0946-1828-4512-9b7d-2bafc5a78ef3-ring-data-devices\") pod \"971f0946-1828-4512-9b7d-2bafc5a78ef3\" (UID: \"971f0946-1828-4512-9b7d-2bafc5a78ef3\") " Oct 03 13:49:46 crc kubenswrapper[4861]: I1003 13:49:46.581953 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/971f0946-1828-4512-9b7d-2bafc5a78ef3-etc-swift\") pod \"971f0946-1828-4512-9b7d-2bafc5a78ef3\" (UID: \"971f0946-1828-4512-9b7d-2bafc5a78ef3\") " Oct 03 13:49:46 crc kubenswrapper[4861]: I1003 13:49:46.583814 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/971f0946-1828-4512-9b7d-2bafc5a78ef3-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "971f0946-1828-4512-9b7d-2bafc5a78ef3" (UID: "971f0946-1828-4512-9b7d-2bafc5a78ef3"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:49:46 crc kubenswrapper[4861]: I1003 13:49:46.585683 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/971f0946-1828-4512-9b7d-2bafc5a78ef3-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "971f0946-1828-4512-9b7d-2bafc5a78ef3" (UID: "971f0946-1828-4512-9b7d-2bafc5a78ef3"). InnerVolumeSpecName "etc-swift". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:49:46 crc kubenswrapper[4861]: I1003 13:49:46.588093 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/971f0946-1828-4512-9b7d-2bafc5a78ef3-kube-api-access-g79sl" (OuterVolumeSpecName: "kube-api-access-g79sl") pod "971f0946-1828-4512-9b7d-2bafc5a78ef3" (UID: "971f0946-1828-4512-9b7d-2bafc5a78ef3"). InnerVolumeSpecName "kube-api-access-g79sl". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:49:46 crc kubenswrapper[4861]: I1003 13:49:46.624832 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/971f0946-1828-4512-9b7d-2bafc5a78ef3-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "971f0946-1828-4512-9b7d-2bafc5a78ef3" (UID: "971f0946-1828-4512-9b7d-2bafc5a78ef3"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:49:46 crc kubenswrapper[4861]: I1003 13:49:46.642081 4861 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-57d769cc4f-2ntbd" podUID="23e02e1d-0ff1-4ce2-a299-5802ec3177cd" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.98:5353: i/o timeout" Oct 03 13:49:46 crc kubenswrapper[4861]: I1003 13:49:46.646878 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/971f0946-1828-4512-9b7d-2bafc5a78ef3-scripts" (OuterVolumeSpecName: "scripts") pod "971f0946-1828-4512-9b7d-2bafc5a78ef3" (UID: "971f0946-1828-4512-9b7d-2bafc5a78ef3"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:49:46 crc kubenswrapper[4861]: I1003 13:49:46.683199 4861 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/971f0946-1828-4512-9b7d-2bafc5a78ef3-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 13:49:46 crc kubenswrapper[4861]: I1003 13:49:46.683404 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g79sl\" (UniqueName: \"kubernetes.io/projected/971f0946-1828-4512-9b7d-2bafc5a78ef3-kube-api-access-g79sl\") on node \"crc\" DevicePath \"\"" Oct 03 13:49:46 crc kubenswrapper[4861]: I1003 13:49:46.683436 4861 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/971f0946-1828-4512-9b7d-2bafc5a78ef3-dispersionconf\") on node \"crc\" DevicePath \"\"" Oct 03 13:49:46 crc kubenswrapper[4861]: I1003 13:49:46.683447 4861 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/971f0946-1828-4512-9b7d-2bafc5a78ef3-ring-data-devices\") on node \"crc\" DevicePath \"\"" Oct 03 13:49:46 crc kubenswrapper[4861]: I1003 13:49:46.683457 4861 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/971f0946-1828-4512-9b7d-2bafc5a78ef3-etc-swift\") on node \"crc\" DevicePath \"\"" Oct 03 13:49:46 crc kubenswrapper[4861]: I1003 13:49:46.725973 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-clwv7"] Oct 03 13:49:46 crc kubenswrapper[4861]: I1003 13:49:46.760029 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/971f0946-1828-4512-9b7d-2bafc5a78ef3-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "971f0946-1828-4512-9b7d-2bafc5a78ef3" (UID: "971f0946-1828-4512-9b7d-2bafc5a78ef3"). InnerVolumeSpecName "swiftconf". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:49:46 crc kubenswrapper[4861]: I1003 13:49:46.782694 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/971f0946-1828-4512-9b7d-2bafc5a78ef3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "971f0946-1828-4512-9b7d-2bafc5a78ef3" (UID: "971f0946-1828-4512-9b7d-2bafc5a78ef3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:49:46 crc kubenswrapper[4861]: I1003 13:49:46.785171 4861 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/971f0946-1828-4512-9b7d-2bafc5a78ef3-swiftconf\") on node \"crc\" DevicePath \"\"" Oct 03 13:49:46 crc kubenswrapper[4861]: I1003 13:49:46.785203 4861 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/971f0946-1828-4512-9b7d-2bafc5a78ef3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 13:49:47 crc kubenswrapper[4861]: I1003 13:49:47.084791 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-6jwg6"] Oct 03 13:49:47 crc kubenswrapper[4861]: I1003 13:49:47.112034 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-s5lt7" event={"ID":"971f0946-1828-4512-9b7d-2bafc5a78ef3","Type":"ContainerDied","Data":"56905996314a13672aef12bdd08392229c6711a3d2dfb3c78e1bdcd5c7dde2e0"} Oct 03 13:49:47 crc kubenswrapper[4861]: I1003 13:49:47.112075 4861 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="56905996314a13672aef12bdd08392229c6711a3d2dfb3c78e1bdcd5c7dde2e0" Oct 03 13:49:47 crc kubenswrapper[4861]: I1003 13:49:47.112155 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-s5lt7" Oct 03 13:49:47 crc kubenswrapper[4861]: I1003 13:49:47.117182 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-clwv7" event={"ID":"8eae347e-a22b-43bb-8fdb-f7bfb1022fdc","Type":"ContainerStarted","Data":"c2176b17df52661b4a9aaeae7773b800759bdd492e688f9d72ef348ce3a1eb5a"} Oct 03 13:49:47 crc kubenswrapper[4861]: I1003 13:49:47.117221 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-clwv7" event={"ID":"8eae347e-a22b-43bb-8fdb-f7bfb1022fdc","Type":"ContainerStarted","Data":"301c280a485281e4d161a36d4c38f118554818af795d579cc87de656b8ea763e"} Oct 03 13:49:47 crc kubenswrapper[4861]: I1003 13:49:47.121569 4861 generic.go:334] "Generic (PLEG): container finished" podID="88033eb7-5000-426e-9cae-8352354b0ca5" containerID="35c92713f444d21249d42a2920ff2bb17ad6c59e839303ffee5dd8340318b7cb" exitCode=0 Oct 03 13:49:47 crc kubenswrapper[4861]: I1003 13:49:47.121611 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-6thsd" event={"ID":"88033eb7-5000-426e-9cae-8352354b0ca5","Type":"ContainerDied","Data":"35c92713f444d21249d42a2920ff2bb17ad6c59e839303ffee5dd8340318b7cb"} Oct 03 13:49:47 crc kubenswrapper[4861]: I1003 13:49:47.121634 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-6thsd" event={"ID":"88033eb7-5000-426e-9cae-8352354b0ca5","Type":"ContainerStarted","Data":"7dba7ad2185f155ff1aaa3d58619a811c7643919abbfe19cbf6f5a6f660996e6"} Oct 03 13:49:47 crc kubenswrapper[4861]: I1003 13:49:47.137802 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-create-clwv7" podStartSLOduration=2.137785842 podStartE2EDuration="2.137785842s" podCreationTimestamp="2025-10-03 13:49:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:49:47.132616946 +0000 UTC m=+1101.130601993" watchObservedRunningTime="2025-10-03 13:49:47.137785842 +0000 UTC m=+1101.135770889" Oct 03 13:49:47 crc kubenswrapper[4861]: I1003 13:49:47.805614 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Oct 03 13:49:48 crc kubenswrapper[4861]: I1003 13:49:48.144011 4861 generic.go:334] "Generic (PLEG): container finished" podID="8eae347e-a22b-43bb-8fdb-f7bfb1022fdc" containerID="c2176b17df52661b4a9aaeae7773b800759bdd492e688f9d72ef348ce3a1eb5a" exitCode=0 Oct 03 13:49:48 crc kubenswrapper[4861]: I1003 13:49:48.144106 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-clwv7" event={"ID":"8eae347e-a22b-43bb-8fdb-f7bfb1022fdc","Type":"ContainerDied","Data":"c2176b17df52661b4a9aaeae7773b800759bdd492e688f9d72ef348ce3a1eb5a"} Oct 03 13:49:48 crc kubenswrapper[4861]: I1003 13:49:48.145829 4861 generic.go:334] "Generic (PLEG): container finished" podID="5bda74fa-3e6f-4a03-aa42-b3e1ea436936" containerID="5bed09adce703f23c3cdfdf5ee6ae74113e17a2b33d3d78950fa4125bc1fc24c" exitCode=0 Oct 03 13:49:48 crc kubenswrapper[4861]: I1003 13:49:48.145866 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-6jwg6" event={"ID":"5bda74fa-3e6f-4a03-aa42-b3e1ea436936","Type":"ContainerDied","Data":"5bed09adce703f23c3cdfdf5ee6ae74113e17a2b33d3d78950fa4125bc1fc24c"} Oct 03 13:49:48 crc kubenswrapper[4861]: I1003 13:49:48.145880 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/glance-db-create-6jwg6" event={"ID":"5bda74fa-3e6f-4a03-aa42-b3e1ea436936","Type":"ContainerStarted","Data":"0fb6fbcdfba8e81013380158b710ec25c5a0e2a92b00b977ec9bf73def22097f"} Oct 03 13:49:48 crc kubenswrapper[4861]: I1003 13:49:48.148339 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"532954b7-a9d5-4ddb-87af-b17408a5db8b","Type":"ContainerStarted","Data":"5055720810a5aa3bcaa7e553d8c3ac288594df27d2de94e1f5a2767d6771c7a8"} Oct 03 13:49:48 crc kubenswrapper[4861]: I1003 13:49:48.148421 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"532954b7-a9d5-4ddb-87af-b17408a5db8b","Type":"ContainerStarted","Data":"a1360cd2134d77f25007c551062e8a96579262a4f42517f7fe520f4f0d07ff3d"} Oct 03 13:49:48 crc kubenswrapper[4861]: I1003 13:49:48.148438 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"532954b7-a9d5-4ddb-87af-b17408a5db8b","Type":"ContainerStarted","Data":"dfe3e2b9932ef0099a69b1e57c0f76e5cbc1b5a616a405637f00c9477f4b82a3"} Oct 03 13:49:48 crc kubenswrapper[4861]: I1003 13:49:48.532787 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-6thsd" Oct 03 13:49:48 crc kubenswrapper[4861]: I1003 13:49:48.624960 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gjpjm\" (UniqueName: \"kubernetes.io/projected/88033eb7-5000-426e-9cae-8352354b0ca5-kube-api-access-gjpjm\") pod \"88033eb7-5000-426e-9cae-8352354b0ca5\" (UID: \"88033eb7-5000-426e-9cae-8352354b0ca5\") " Oct 03 13:49:48 crc kubenswrapper[4861]: I1003 13:49:48.631243 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/88033eb7-5000-426e-9cae-8352354b0ca5-kube-api-access-gjpjm" (OuterVolumeSpecName: "kube-api-access-gjpjm") pod "88033eb7-5000-426e-9cae-8352354b0ca5" (UID: "88033eb7-5000-426e-9cae-8352354b0ca5"). InnerVolumeSpecName "kube-api-access-gjpjm". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:49:48 crc kubenswrapper[4861]: I1003 13:49:48.727166 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gjpjm\" (UniqueName: \"kubernetes.io/projected/88033eb7-5000-426e-9cae-8352354b0ca5-kube-api-access-gjpjm\") on node \"crc\" DevicePath \"\"" Oct 03 13:49:49 crc kubenswrapper[4861]: I1003 13:49:49.158788 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-6thsd" event={"ID":"88033eb7-5000-426e-9cae-8352354b0ca5","Type":"ContainerDied","Data":"7dba7ad2185f155ff1aaa3d58619a811c7643919abbfe19cbf6f5a6f660996e6"} Oct 03 13:49:49 crc kubenswrapper[4861]: I1003 13:49:49.159365 4861 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7dba7ad2185f155ff1aaa3d58619a811c7643919abbfe19cbf6f5a6f660996e6" Oct 03 13:49:49 crc kubenswrapper[4861]: I1003 13:49:49.159464 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-6thsd" Oct 03 13:49:49 crc kubenswrapper[4861]: I1003 13:49:49.163974 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"532954b7-a9d5-4ddb-87af-b17408a5db8b","Type":"ContainerStarted","Data":"da684d7e393d8e33899e8104ea0855de5c3afae84dc9b77350899bcac0a5e6ac"} Oct 03 13:49:49 crc kubenswrapper[4861]: I1003 13:49:49.641202 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-create-clwv7" Oct 03 13:49:49 crc kubenswrapper[4861]: I1003 13:49:49.648280 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-6jwg6" Oct 03 13:49:49 crc kubenswrapper[4861]: I1003 13:49:49.746777 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kzkpj\" (UniqueName: \"kubernetes.io/projected/5bda74fa-3e6f-4a03-aa42-b3e1ea436936-kube-api-access-kzkpj\") pod \"5bda74fa-3e6f-4a03-aa42-b3e1ea436936\" (UID: \"5bda74fa-3e6f-4a03-aa42-b3e1ea436936\") " Oct 03 13:49:49 crc kubenswrapper[4861]: I1003 13:49:49.747164 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8nsrj\" (UniqueName: \"kubernetes.io/projected/8eae347e-a22b-43bb-8fdb-f7bfb1022fdc-kube-api-access-8nsrj\") pod \"8eae347e-a22b-43bb-8fdb-f7bfb1022fdc\" (UID: \"8eae347e-a22b-43bb-8fdb-f7bfb1022fdc\") " Oct 03 13:49:49 crc kubenswrapper[4861]: I1003 13:49:49.751751 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5bda74fa-3e6f-4a03-aa42-b3e1ea436936-kube-api-access-kzkpj" (OuterVolumeSpecName: "kube-api-access-kzkpj") pod "5bda74fa-3e6f-4a03-aa42-b3e1ea436936" (UID: "5bda74fa-3e6f-4a03-aa42-b3e1ea436936"). InnerVolumeSpecName "kube-api-access-kzkpj". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:49:49 crc kubenswrapper[4861]: I1003 13:49:49.753555 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8eae347e-a22b-43bb-8fdb-f7bfb1022fdc-kube-api-access-8nsrj" (OuterVolumeSpecName: "kube-api-access-8nsrj") pod "8eae347e-a22b-43bb-8fdb-f7bfb1022fdc" (UID: "8eae347e-a22b-43bb-8fdb-f7bfb1022fdc"). InnerVolumeSpecName "kube-api-access-8nsrj". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:49:49 crc kubenswrapper[4861]: I1003 13:49:49.849338 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kzkpj\" (UniqueName: \"kubernetes.io/projected/5bda74fa-3e6f-4a03-aa42-b3e1ea436936-kube-api-access-kzkpj\") on node \"crc\" DevicePath \"\"" Oct 03 13:49:49 crc kubenswrapper[4861]: I1003 13:49:49.849370 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8nsrj\" (UniqueName: \"kubernetes.io/projected/8eae347e-a22b-43bb-8fdb-f7bfb1022fdc-kube-api-access-8nsrj\") on node \"crc\" DevicePath \"\"" Oct 03 13:49:50 crc kubenswrapper[4861]: I1003 13:49:50.173623 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-create-clwv7" Oct 03 13:49:50 crc kubenswrapper[4861]: I1003 13:49:50.173620 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-clwv7" event={"ID":"8eae347e-a22b-43bb-8fdb-f7bfb1022fdc","Type":"ContainerDied","Data":"301c280a485281e4d161a36d4c38f118554818af795d579cc87de656b8ea763e"} Oct 03 13:49:50 crc kubenswrapper[4861]: I1003 13:49:50.173749 4861 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="301c280a485281e4d161a36d4c38f118554818af795d579cc87de656b8ea763e" Oct 03 13:49:50 crc kubenswrapper[4861]: I1003 13:49:50.175854 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-6jwg6" event={"ID":"5bda74fa-3e6f-4a03-aa42-b3e1ea436936","Type":"ContainerDied","Data":"0fb6fbcdfba8e81013380158b710ec25c5a0e2a92b00b977ec9bf73def22097f"} Oct 03 13:49:50 crc kubenswrapper[4861]: I1003 13:49:50.175884 4861 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0fb6fbcdfba8e81013380158b710ec25c5a0e2a92b00b977ec9bf73def22097f" Oct 03 13:49:50 crc kubenswrapper[4861]: I1003 13:49:50.175902 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-6jwg6" Oct 03 13:49:51 crc kubenswrapper[4861]: I1003 13:49:51.186854 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"532954b7-a9d5-4ddb-87af-b17408a5db8b","Type":"ContainerStarted","Data":"bdddfdc4e46ad53b0d182f7be975e6de73678b1e71a571ade18cb5f97624a6ae"} Oct 03 13:49:51 crc kubenswrapper[4861]: I1003 13:49:51.187161 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"532954b7-a9d5-4ddb-87af-b17408a5db8b","Type":"ContainerStarted","Data":"605c4e4476203df3360d9a2a902a7020d283eb33fdc02d3da46fdb2bd86d2354"} Oct 03 13:49:51 crc kubenswrapper[4861]: I1003 13:49:51.187172 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"532954b7-a9d5-4ddb-87af-b17408a5db8b","Type":"ContainerStarted","Data":"b242059471128239898c9f81af828bca751bea7cd9ce7a7764eebb9050042424"} Oct 03 13:49:51 crc kubenswrapper[4861]: I1003 13:49:51.187180 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"532954b7-a9d5-4ddb-87af-b17408a5db8b","Type":"ContainerStarted","Data":"f4b223ca9a6f06c4d620479b9a7d40e4188c80264dd19f8b4b5f83f657ba5967"} Oct 03 13:49:51 crc kubenswrapper[4861]: I1003 13:49:51.362068 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-j8c4m" Oct 03 13:49:51 crc kubenswrapper[4861]: I1003 13:49:51.368359 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-j8c4m" Oct 03 13:49:51 crc kubenswrapper[4861]: I1003 13:49:51.600051 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-x5wkc-config-4bwlt"] Oct 03 13:49:51 crc kubenswrapper[4861]: E1003 13:49:51.600458 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8eae347e-a22b-43bb-8fdb-f7bfb1022fdc" containerName="mariadb-database-create" Oct 03 13:49:51 crc kubenswrapper[4861]: I1003 13:49:51.600480 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="8eae347e-a22b-43bb-8fdb-f7bfb1022fdc" containerName="mariadb-database-create" Oct 03 13:49:51 crc kubenswrapper[4861]: E1003 13:49:51.600509 4861 cpu_manager.go:410] "RemoveStaleState: 
removing container" podUID="971f0946-1828-4512-9b7d-2bafc5a78ef3" containerName="swift-ring-rebalance" Oct 03 13:49:51 crc kubenswrapper[4861]: I1003 13:49:51.600519 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="971f0946-1828-4512-9b7d-2bafc5a78ef3" containerName="swift-ring-rebalance" Oct 03 13:49:51 crc kubenswrapper[4861]: E1003 13:49:51.600540 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="88033eb7-5000-426e-9cae-8352354b0ca5" containerName="mariadb-database-create" Oct 03 13:49:51 crc kubenswrapper[4861]: I1003 13:49:51.600550 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="88033eb7-5000-426e-9cae-8352354b0ca5" containerName="mariadb-database-create" Oct 03 13:49:51 crc kubenswrapper[4861]: E1003 13:49:51.600569 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5bda74fa-3e6f-4a03-aa42-b3e1ea436936" containerName="mariadb-database-create" Oct 03 13:49:51 crc kubenswrapper[4861]: I1003 13:49:51.600576 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="5bda74fa-3e6f-4a03-aa42-b3e1ea436936" containerName="mariadb-database-create" Oct 03 13:49:51 crc kubenswrapper[4861]: I1003 13:49:51.600766 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="8eae347e-a22b-43bb-8fdb-f7bfb1022fdc" containerName="mariadb-database-create" Oct 03 13:49:51 crc kubenswrapper[4861]: I1003 13:49:51.600823 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="971f0946-1828-4512-9b7d-2bafc5a78ef3" containerName="swift-ring-rebalance" Oct 03 13:49:51 crc kubenswrapper[4861]: I1003 13:49:51.600860 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="88033eb7-5000-426e-9cae-8352354b0ca5" containerName="mariadb-database-create" Oct 03 13:49:51 crc kubenswrapper[4861]: I1003 13:49:51.600877 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="5bda74fa-3e6f-4a03-aa42-b3e1ea436936" containerName="mariadb-database-create" Oct 03 13:49:51 crc kubenswrapper[4861]: I1003 13:49:51.601525 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-x5wkc-config-4bwlt" Oct 03 13:49:51 crc kubenswrapper[4861]: I1003 13:49:51.604585 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Oct 03 13:49:51 crc kubenswrapper[4861]: I1003 13:49:51.614222 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-x5wkc-config-4bwlt"] Oct 03 13:49:51 crc kubenswrapper[4861]: I1003 13:49:51.779720 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7536c672-a720-4761-a516-cbd3d811a949-scripts\") pod \"ovn-controller-x5wkc-config-4bwlt\" (UID: \"7536c672-a720-4761-a516-cbd3d811a949\") " pod="openstack/ovn-controller-x5wkc-config-4bwlt" Oct 03 13:49:51 crc kubenswrapper[4861]: I1003 13:49:51.780777 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/7536c672-a720-4761-a516-cbd3d811a949-var-log-ovn\") pod \"ovn-controller-x5wkc-config-4bwlt\" (UID: \"7536c672-a720-4761-a516-cbd3d811a949\") " pod="openstack/ovn-controller-x5wkc-config-4bwlt" Oct 03 13:49:51 crc kubenswrapper[4861]: I1003 13:49:51.780921 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/7536c672-a720-4761-a516-cbd3d811a949-var-run\") pod \"ovn-controller-x5wkc-config-4bwlt\" (UID: \"7536c672-a720-4761-a516-cbd3d811a949\") " pod="openstack/ovn-controller-x5wkc-config-4bwlt" Oct 03 13:49:51 crc kubenswrapper[4861]: I1003 13:49:51.781133 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-67cjz\" (UniqueName: \"kubernetes.io/projected/7536c672-a720-4761-a516-cbd3d811a949-kube-api-access-67cjz\") pod \"ovn-controller-x5wkc-config-4bwlt\" (UID: \"7536c672-a720-4761-a516-cbd3d811a949\") " pod="openstack/ovn-controller-x5wkc-config-4bwlt" Oct 03 13:49:51 crc kubenswrapper[4861]: I1003 13:49:51.781274 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/7536c672-a720-4761-a516-cbd3d811a949-additional-scripts\") pod \"ovn-controller-x5wkc-config-4bwlt\" (UID: \"7536c672-a720-4761-a516-cbd3d811a949\") " pod="openstack/ovn-controller-x5wkc-config-4bwlt" Oct 03 13:49:51 crc kubenswrapper[4861]: I1003 13:49:51.781381 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/7536c672-a720-4761-a516-cbd3d811a949-var-run-ovn\") pod \"ovn-controller-x5wkc-config-4bwlt\" (UID: \"7536c672-a720-4761-a516-cbd3d811a949\") " pod="openstack/ovn-controller-x5wkc-config-4bwlt" Oct 03 13:49:51 crc kubenswrapper[4861]: I1003 13:49:51.882537 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-67cjz\" (UniqueName: \"kubernetes.io/projected/7536c672-a720-4761-a516-cbd3d811a949-kube-api-access-67cjz\") pod \"ovn-controller-x5wkc-config-4bwlt\" (UID: \"7536c672-a720-4761-a516-cbd3d811a949\") " pod="openstack/ovn-controller-x5wkc-config-4bwlt" Oct 03 13:49:51 crc kubenswrapper[4861]: I1003 13:49:51.882597 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: 
\"kubernetes.io/configmap/7536c672-a720-4761-a516-cbd3d811a949-additional-scripts\") pod \"ovn-controller-x5wkc-config-4bwlt\" (UID: \"7536c672-a720-4761-a516-cbd3d811a949\") " pod="openstack/ovn-controller-x5wkc-config-4bwlt" Oct 03 13:49:51 crc kubenswrapper[4861]: I1003 13:49:51.882643 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/7536c672-a720-4761-a516-cbd3d811a949-var-run-ovn\") pod \"ovn-controller-x5wkc-config-4bwlt\" (UID: \"7536c672-a720-4761-a516-cbd3d811a949\") " pod="openstack/ovn-controller-x5wkc-config-4bwlt" Oct 03 13:49:51 crc kubenswrapper[4861]: I1003 13:49:51.882673 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7536c672-a720-4761-a516-cbd3d811a949-scripts\") pod \"ovn-controller-x5wkc-config-4bwlt\" (UID: \"7536c672-a720-4761-a516-cbd3d811a949\") " pod="openstack/ovn-controller-x5wkc-config-4bwlt" Oct 03 13:49:51 crc kubenswrapper[4861]: I1003 13:49:51.882704 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/7536c672-a720-4761-a516-cbd3d811a949-var-log-ovn\") pod \"ovn-controller-x5wkc-config-4bwlt\" (UID: \"7536c672-a720-4761-a516-cbd3d811a949\") " pod="openstack/ovn-controller-x5wkc-config-4bwlt" Oct 03 13:49:51 crc kubenswrapper[4861]: I1003 13:49:51.882744 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/7536c672-a720-4761-a516-cbd3d811a949-var-run\") pod \"ovn-controller-x5wkc-config-4bwlt\" (UID: \"7536c672-a720-4761-a516-cbd3d811a949\") " pod="openstack/ovn-controller-x5wkc-config-4bwlt" Oct 03 13:49:51 crc kubenswrapper[4861]: I1003 13:49:51.883102 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/7536c672-a720-4761-a516-cbd3d811a949-var-run\") pod \"ovn-controller-x5wkc-config-4bwlt\" (UID: \"7536c672-a720-4761-a516-cbd3d811a949\") " pod="openstack/ovn-controller-x5wkc-config-4bwlt" Oct 03 13:49:51 crc kubenswrapper[4861]: I1003 13:49:51.883164 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/7536c672-a720-4761-a516-cbd3d811a949-var-log-ovn\") pod \"ovn-controller-x5wkc-config-4bwlt\" (UID: \"7536c672-a720-4761-a516-cbd3d811a949\") " pod="openstack/ovn-controller-x5wkc-config-4bwlt" Oct 03 13:49:51 crc kubenswrapper[4861]: I1003 13:49:51.883180 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/7536c672-a720-4761-a516-cbd3d811a949-var-run-ovn\") pod \"ovn-controller-x5wkc-config-4bwlt\" (UID: \"7536c672-a720-4761-a516-cbd3d811a949\") " pod="openstack/ovn-controller-x5wkc-config-4bwlt" Oct 03 13:49:51 crc kubenswrapper[4861]: I1003 13:49:51.883908 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/7536c672-a720-4761-a516-cbd3d811a949-additional-scripts\") pod \"ovn-controller-x5wkc-config-4bwlt\" (UID: \"7536c672-a720-4761-a516-cbd3d811a949\") " pod="openstack/ovn-controller-x5wkc-config-4bwlt" Oct 03 13:49:51 crc kubenswrapper[4861]: I1003 13:49:51.885522 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/configmap/7536c672-a720-4761-a516-cbd3d811a949-scripts\") pod \"ovn-controller-x5wkc-config-4bwlt\" (UID: \"7536c672-a720-4761-a516-cbd3d811a949\") " pod="openstack/ovn-controller-x5wkc-config-4bwlt" Oct 03 13:49:51 crc kubenswrapper[4861]: I1003 13:49:51.906209 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-67cjz\" (UniqueName: \"kubernetes.io/projected/7536c672-a720-4761-a516-cbd3d811a949-kube-api-access-67cjz\") pod \"ovn-controller-x5wkc-config-4bwlt\" (UID: \"7536c672-a720-4761-a516-cbd3d811a949\") " pod="openstack/ovn-controller-x5wkc-config-4bwlt" Oct 03 13:49:51 crc kubenswrapper[4861]: I1003 13:49:51.961112 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-x5wkc-config-4bwlt" Oct 03 13:49:52 crc kubenswrapper[4861]: I1003 13:49:52.373318 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:49:52 crc kubenswrapper[4861]: I1003 13:49:52.458682 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-x5wkc-config-4bwlt"] Oct 03 13:49:52 crc kubenswrapper[4861]: I1003 13:49:52.775540 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Oct 03 13:49:53 crc kubenswrapper[4861]: W1003 13:49:53.067306 4861 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7536c672_a720_4761_a516_cbd3d811a949.slice/crio-f087547e51d0143728beb15dd7d52f74226a73082763383f3bc79ac17e36f550 WatchSource:0}: Error finding container f087547e51d0143728beb15dd7d52f74226a73082763383f3bc79ac17e36f550: Status 404 returned error can't find the container with id f087547e51d0143728beb15dd7d52f74226a73082763383f3bc79ac17e36f550 Oct 03 13:49:53 crc kubenswrapper[4861]: I1003 13:49:53.206959 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-x5wkc-config-4bwlt" event={"ID":"7536c672-a720-4761-a516-cbd3d811a949","Type":"ContainerStarted","Data":"f087547e51d0143728beb15dd7d52f74226a73082763383f3bc79ac17e36f550"} Oct 03 13:49:54 crc kubenswrapper[4861]: I1003 13:49:54.071035 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0" Oct 03 13:49:54 crc kubenswrapper[4861]: I1003 13:49:54.219505 4861 generic.go:334] "Generic (PLEG): container finished" podID="7536c672-a720-4761-a516-cbd3d811a949" containerID="60250a7670cba43875a0bb507a6bf0501a8f81070df90b0d0da123ba0742bc8f" exitCode=0 Oct 03 13:49:54 crc kubenswrapper[4861]: I1003 13:49:54.219553 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-x5wkc-config-4bwlt" event={"ID":"7536c672-a720-4761-a516-cbd3d811a949","Type":"ContainerDied","Data":"60250a7670cba43875a0bb507a6bf0501a8f81070df90b0d0da123ba0742bc8f"} Oct 03 13:49:54 crc kubenswrapper[4861]: I1003 13:49:54.284264 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-lh2dn"] Oct 03 13:49:54 crc kubenswrapper[4861]: I1003 13:49:54.285502 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-lh2dn" Oct 03 13:49:54 crc kubenswrapper[4861]: I1003 13:49:54.305182 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-lh2dn"] Oct 03 13:49:54 crc kubenswrapper[4861]: I1003 13:49:54.381041 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-7jdcl"] Oct 03 13:49:54 crc kubenswrapper[4861]: I1003 13:49:54.382062 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-7jdcl" Oct 03 13:49:54 crc kubenswrapper[4861]: I1003 13:49:54.388146 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-7jdcl"] Oct 03 13:49:54 crc kubenswrapper[4861]: I1003 13:49:54.439656 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t5r6s\" (UniqueName: \"kubernetes.io/projected/1a715a54-ad54-4756-aa5c-32ac4cc2e1f4-kube-api-access-t5r6s\") pod \"barbican-db-create-7jdcl\" (UID: \"1a715a54-ad54-4756-aa5c-32ac4cc2e1f4\") " pod="openstack/barbican-db-create-7jdcl" Oct 03 13:49:54 crc kubenswrapper[4861]: I1003 13:49:54.439763 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lhzmk\" (UniqueName: \"kubernetes.io/projected/f2641015-4563-45ac-bfa5-e19fcc791806-kube-api-access-lhzmk\") pod \"cinder-db-create-lh2dn\" (UID: \"f2641015-4563-45ac-bfa5-e19fcc791806\") " pod="openstack/cinder-db-create-lh2dn" Oct 03 13:49:54 crc kubenswrapper[4861]: I1003 13:49:54.541157 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lhzmk\" (UniqueName: \"kubernetes.io/projected/f2641015-4563-45ac-bfa5-e19fcc791806-kube-api-access-lhzmk\") pod \"cinder-db-create-lh2dn\" (UID: \"f2641015-4563-45ac-bfa5-e19fcc791806\") " pod="openstack/cinder-db-create-lh2dn" Oct 03 13:49:54 crc kubenswrapper[4861]: I1003 13:49:54.541354 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t5r6s\" (UniqueName: \"kubernetes.io/projected/1a715a54-ad54-4756-aa5c-32ac4cc2e1f4-kube-api-access-t5r6s\") pod \"barbican-db-create-7jdcl\" (UID: \"1a715a54-ad54-4756-aa5c-32ac4cc2e1f4\") " pod="openstack/barbican-db-create-7jdcl" Oct 03 13:49:54 crc kubenswrapper[4861]: I1003 13:49:54.561793 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t5r6s\" (UniqueName: \"kubernetes.io/projected/1a715a54-ad54-4756-aa5c-32ac4cc2e1f4-kube-api-access-t5r6s\") pod \"barbican-db-create-7jdcl\" (UID: \"1a715a54-ad54-4756-aa5c-32ac4cc2e1f4\") " pod="openstack/barbican-db-create-7jdcl" Oct 03 13:49:54 crc kubenswrapper[4861]: I1003 13:49:54.575324 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lhzmk\" (UniqueName: \"kubernetes.io/projected/f2641015-4563-45ac-bfa5-e19fcc791806-kube-api-access-lhzmk\") pod \"cinder-db-create-lh2dn\" (UID: \"f2641015-4563-45ac-bfa5-e19fcc791806\") " pod="openstack/cinder-db-create-lh2dn" Oct 03 13:49:54 crc kubenswrapper[4861]: I1003 13:49:54.600985 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-4gn45"] Oct 03 13:49:54 crc kubenswrapper[4861]: I1003 13:49:54.604104 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-4gn45" Oct 03 13:49:54 crc kubenswrapper[4861]: I1003 13:49:54.609170 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-4gn45"] Oct 03 13:49:54 crc kubenswrapper[4861]: I1003 13:49:54.619526 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-lh2dn" Oct 03 13:49:54 crc kubenswrapper[4861]: I1003 13:49:54.714006 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-7jdcl" Oct 03 13:49:54 crc kubenswrapper[4861]: I1003 13:49:54.744207 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-48cfk\" (UniqueName: \"kubernetes.io/projected/6f60eab9-bc53-4007-bc7e-261845584dae-kube-api-access-48cfk\") pod \"neutron-db-create-4gn45\" (UID: \"6f60eab9-bc53-4007-bc7e-261845584dae\") " pod="openstack/neutron-db-create-4gn45" Oct 03 13:49:54 crc kubenswrapper[4861]: I1003 13:49:54.846013 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-48cfk\" (UniqueName: \"kubernetes.io/projected/6f60eab9-bc53-4007-bc7e-261845584dae-kube-api-access-48cfk\") pod \"neutron-db-create-4gn45\" (UID: \"6f60eab9-bc53-4007-bc7e-261845584dae\") " pod="openstack/neutron-db-create-4gn45" Oct 03 13:49:54 crc kubenswrapper[4861]: I1003 13:49:54.888433 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-48cfk\" (UniqueName: \"kubernetes.io/projected/6f60eab9-bc53-4007-bc7e-261845584dae-kube-api-access-48cfk\") pod \"neutron-db-create-4gn45\" (UID: \"6f60eab9-bc53-4007-bc7e-261845584dae\") " pod="openstack/neutron-db-create-4gn45" Oct 03 13:49:55 crc kubenswrapper[4861]: I1003 13:49:55.023664 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-4gn45" Oct 03 13:49:55 crc kubenswrapper[4861]: I1003 13:49:55.181887 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-lh2dn"] Oct 03 13:49:55 crc kubenswrapper[4861]: I1003 13:49:55.282094 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"532954b7-a9d5-4ddb-87af-b17408a5db8b","Type":"ContainerStarted","Data":"8d2181c43b32a83adf6d90d9903a819801efe7b3acee067ac0b52fc1e379ae99"} Oct 03 13:49:55 crc kubenswrapper[4861]: I1003 13:49:55.282361 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"532954b7-a9d5-4ddb-87af-b17408a5db8b","Type":"ContainerStarted","Data":"65a98ae86b001f3efac43619996696d2a0313503802e00ff5fd5e1141c0f5511"} Oct 03 13:49:55 crc kubenswrapper[4861]: I1003 13:49:55.411063 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-7jdcl"] Oct 03 13:49:55 crc kubenswrapper[4861]: I1003 13:49:55.612033 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-8a29-account-create-qtclr"] Oct 03 13:49:55 crc kubenswrapper[4861]: I1003 13:49:55.613325 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-8a29-account-create-qtclr" Oct 03 13:49:55 crc kubenswrapper[4861]: I1003 13:49:55.625533 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret" Oct 03 13:49:55 crc kubenswrapper[4861]: I1003 13:49:55.635712 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-8a29-account-create-qtclr"] Oct 03 13:49:55 crc kubenswrapper[4861]: I1003 13:49:55.666061 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4pl6t\" (UniqueName: \"kubernetes.io/projected/900785d8-c3f4-4ba6-a925-56165ba86a6d-kube-api-access-4pl6t\") pod \"keystone-8a29-account-create-qtclr\" (UID: \"900785d8-c3f4-4ba6-a925-56165ba86a6d\") " pod="openstack/keystone-8a29-account-create-qtclr" Oct 03 13:49:55 crc kubenswrapper[4861]: I1003 13:49:55.767411 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4pl6t\" (UniqueName: \"kubernetes.io/projected/900785d8-c3f4-4ba6-a925-56165ba86a6d-kube-api-access-4pl6t\") pod \"keystone-8a29-account-create-qtclr\" (UID: \"900785d8-c3f4-4ba6-a925-56165ba86a6d\") " pod="openstack/keystone-8a29-account-create-qtclr" Oct 03 13:49:55 crc kubenswrapper[4861]: W1003 13:49:55.796187 4861 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6f60eab9_bc53_4007_bc7e_261845584dae.slice/crio-b16c3000f90e99b4a4581e00a0f9333686562f1c9425984f77d6b5a436f4668d WatchSource:0}: Error finding container b16c3000f90e99b4a4581e00a0f9333686562f1c9425984f77d6b5a436f4668d: Status 404 returned error can't find the container with id b16c3000f90e99b4a4581e00a0f9333686562f1c9425984f77d6b5a436f4668d Oct 03 13:49:55 crc kubenswrapper[4861]: I1003 13:49:55.798121 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-4gn45"] Oct 03 13:49:55 crc kubenswrapper[4861]: I1003 13:49:55.806315 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4pl6t\" (UniqueName: \"kubernetes.io/projected/900785d8-c3f4-4ba6-a925-56165ba86a6d-kube-api-access-4pl6t\") pod \"keystone-8a29-account-create-qtclr\" (UID: \"900785d8-c3f4-4ba6-a925-56165ba86a6d\") " pod="openstack/keystone-8a29-account-create-qtclr" Oct 03 13:49:55 crc kubenswrapper[4861]: I1003 13:49:55.948937 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-8a29-account-create-qtclr" Oct 03 13:49:56 crc kubenswrapper[4861]: I1003 13:49:56.048282 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-119e-account-create-jtptm"] Oct 03 13:49:56 crc kubenswrapper[4861]: I1003 13:49:56.052147 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-119e-account-create-jtptm" Oct 03 13:49:56 crc kubenswrapper[4861]: I1003 13:49:56.055722 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret" Oct 03 13:49:56 crc kubenswrapper[4861]: I1003 13:49:56.069302 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-119e-account-create-jtptm"] Oct 03 13:49:56 crc kubenswrapper[4861]: I1003 13:49:56.181892 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6qv9q\" (UniqueName: \"kubernetes.io/projected/a769cb74-01d8-4a40-8a92-4cc15f1d418a-kube-api-access-6qv9q\") pod \"placement-119e-account-create-jtptm\" (UID: \"a769cb74-01d8-4a40-8a92-4cc15f1d418a\") " pod="openstack/placement-119e-account-create-jtptm" Oct 03 13:49:56 crc kubenswrapper[4861]: I1003 13:49:56.283282 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6qv9q\" (UniqueName: \"kubernetes.io/projected/a769cb74-01d8-4a40-8a92-4cc15f1d418a-kube-api-access-6qv9q\") pod \"placement-119e-account-create-jtptm\" (UID: \"a769cb74-01d8-4a40-8a92-4cc15f1d418a\") " pod="openstack/placement-119e-account-create-jtptm" Oct 03 13:49:56 crc kubenswrapper[4861]: I1003 13:49:56.332832 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6qv9q\" (UniqueName: \"kubernetes.io/projected/a769cb74-01d8-4a40-8a92-4cc15f1d418a-kube-api-access-6qv9q\") pod \"placement-119e-account-create-jtptm\" (UID: \"a769cb74-01d8-4a40-8a92-4cc15f1d418a\") " pod="openstack/placement-119e-account-create-jtptm" Oct 03 13:49:56 crc kubenswrapper[4861]: I1003 13:49:56.336498 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-4gn45" event={"ID":"6f60eab9-bc53-4007-bc7e-261845584dae","Type":"ContainerStarted","Data":"b16c3000f90e99b4a4581e00a0f9333686562f1c9425984f77d6b5a436f4668d"} Oct 03 13:49:56 crc kubenswrapper[4861]: I1003 13:49:56.367421 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-7jdcl" event={"ID":"1a715a54-ad54-4756-aa5c-32ac4cc2e1f4","Type":"ContainerStarted","Data":"f8d2efc99fc79879962fc1bd0c0c703a2f8c52aa7de6832c75cb14b66b64ff12"} Oct 03 13:49:56 crc kubenswrapper[4861]: I1003 13:49:56.367773 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-7jdcl" event={"ID":"1a715a54-ad54-4756-aa5c-32ac4cc2e1f4","Type":"ContainerStarted","Data":"a88e3223a5eb8129587cf8e323d6e1301f10762d515d92d32daa10a6c02bbbaa"} Oct 03 13:49:56 crc kubenswrapper[4861]: I1003 13:49:56.381909 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-119e-account-create-jtptm" Oct 03 13:49:56 crc kubenswrapper[4861]: I1003 13:49:56.387310 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-a1af-account-create-r7n7q"] Oct 03 13:49:56 crc kubenswrapper[4861]: I1003 13:49:56.388812 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-a1af-account-create-r7n7q" Oct 03 13:49:56 crc kubenswrapper[4861]: I1003 13:49:56.400876 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret" Oct 03 13:49:56 crc kubenswrapper[4861]: I1003 13:49:56.412786 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-a1af-account-create-r7n7q"] Oct 03 13:49:56 crc kubenswrapper[4861]: I1003 13:49:56.419625 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-create-7jdcl" podStartSLOduration=2.419604917 podStartE2EDuration="2.419604917s" podCreationTimestamp="2025-10-03 13:49:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:49:56.390826364 +0000 UTC m=+1110.388811411" watchObservedRunningTime="2025-10-03 13:49:56.419604917 +0000 UTC m=+1110.417589964" Oct 03 13:49:56 crc kubenswrapper[4861]: I1003 13:49:56.433754 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"532954b7-a9d5-4ddb-87af-b17408a5db8b","Type":"ContainerStarted","Data":"623bc5b52d8d04ec281b914254da65d6eb3974668b940b7503f6538920b51ce8"} Oct 03 13:49:56 crc kubenswrapper[4861]: I1003 13:49:56.433802 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"532954b7-a9d5-4ddb-87af-b17408a5db8b","Type":"ContainerStarted","Data":"97ad887220c5667470bb43c0e921217a8112a691b89632649f48e5786ab555aa"} Oct 03 13:49:56 crc kubenswrapper[4861]: I1003 13:49:56.445662 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-lh2dn" event={"ID":"f2641015-4563-45ac-bfa5-e19fcc791806","Type":"ContainerStarted","Data":"d52e054065d56b47da01e205478c11cd4629fde413f4de1ac80393ae839fa96d"} Oct 03 13:49:56 crc kubenswrapper[4861]: I1003 13:49:56.445717 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-lh2dn" event={"ID":"f2641015-4563-45ac-bfa5-e19fcc791806","Type":"ContainerStarted","Data":"e1ef32c1814c388635040a72b4a96163aa12acf43c64a7200baa4a7a345e469f"} Oct 03 13:49:56 crc kubenswrapper[4861]: I1003 13:49:56.447823 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-x5wkc-config-4bwlt" Oct 03 13:49:56 crc kubenswrapper[4861]: I1003 13:49:56.514218 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/7536c672-a720-4761-a516-cbd3d811a949-var-run-ovn\") pod \"7536c672-a720-4761-a516-cbd3d811a949\" (UID: \"7536c672-a720-4761-a516-cbd3d811a949\") " Oct 03 13:49:56 crc kubenswrapper[4861]: I1003 13:49:56.515698 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/7536c672-a720-4761-a516-cbd3d811a949-additional-scripts\") pod \"7536c672-a720-4761-a516-cbd3d811a949\" (UID: \"7536c672-a720-4761-a516-cbd3d811a949\") " Oct 03 13:49:56 crc kubenswrapper[4861]: I1003 13:49:56.515376 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7536c672-a720-4761-a516-cbd3d811a949-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "7536c672-a720-4761-a516-cbd3d811a949" (UID: "7536c672-a720-4761-a516-cbd3d811a949"). InnerVolumeSpecName "var-run-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 03 13:49:56 crc kubenswrapper[4861]: I1003 13:49:56.515896 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-67cjz\" (UniqueName: \"kubernetes.io/projected/7536c672-a720-4761-a516-cbd3d811a949-kube-api-access-67cjz\") pod \"7536c672-a720-4761-a516-cbd3d811a949\" (UID: \"7536c672-a720-4761-a516-cbd3d811a949\") " Oct 03 13:49:56 crc kubenswrapper[4861]: I1003 13:49:56.520460 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7536c672-a720-4761-a516-cbd3d811a949-scripts\") pod \"7536c672-a720-4761-a516-cbd3d811a949\" (UID: \"7536c672-a720-4761-a516-cbd3d811a949\") " Oct 03 13:49:56 crc kubenswrapper[4861]: I1003 13:49:56.520523 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/7536c672-a720-4761-a516-cbd3d811a949-var-log-ovn\") pod \"7536c672-a720-4761-a516-cbd3d811a949\" (UID: \"7536c672-a720-4761-a516-cbd3d811a949\") " Oct 03 13:49:56 crc kubenswrapper[4861]: I1003 13:49:56.520634 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/7536c672-a720-4761-a516-cbd3d811a949-var-run\") pod \"7536c672-a720-4761-a516-cbd3d811a949\" (UID: \"7536c672-a720-4761-a516-cbd3d811a949\") " Oct 03 13:49:56 crc kubenswrapper[4861]: I1003 13:49:56.520465 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7536c672-a720-4761-a516-cbd3d811a949-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "7536c672-a720-4761-a516-cbd3d811a949" (UID: "7536c672-a720-4761-a516-cbd3d811a949"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:49:56 crc kubenswrapper[4861]: I1003 13:49:56.521065 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7536c672-a720-4761-a516-cbd3d811a949-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "7536c672-a720-4761-a516-cbd3d811a949" (UID: "7536c672-a720-4761-a516-cbd3d811a949"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 03 13:49:56 crc kubenswrapper[4861]: I1003 13:49:56.521112 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7536c672-a720-4761-a516-cbd3d811a949-var-run" (OuterVolumeSpecName: "var-run") pod "7536c672-a720-4761-a516-cbd3d811a949" (UID: "7536c672-a720-4761-a516-cbd3d811a949"). InnerVolumeSpecName "var-run". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 03 13:49:56 crc kubenswrapper[4861]: I1003 13:49:56.521186 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gxhfq\" (UniqueName: \"kubernetes.io/projected/4be391e0-99d9-4191-95f7-416e9e893b30-kube-api-access-gxhfq\") pod \"glance-a1af-account-create-r7n7q\" (UID: \"4be391e0-99d9-4191-95f7-416e9e893b30\") " pod="openstack/glance-a1af-account-create-r7n7q" Oct 03 13:49:56 crc kubenswrapper[4861]: I1003 13:49:56.521508 4861 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/7536c672-a720-4761-a516-cbd3d811a949-var-run-ovn\") on node \"crc\" DevicePath \"\"" Oct 03 13:49:56 crc kubenswrapper[4861]: I1003 13:49:56.521533 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7536c672-a720-4761-a516-cbd3d811a949-scripts" (OuterVolumeSpecName: "scripts") pod "7536c672-a720-4761-a516-cbd3d811a949" (UID: "7536c672-a720-4761-a516-cbd3d811a949"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:49:56 crc kubenswrapper[4861]: I1003 13:49:56.521539 4861 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/7536c672-a720-4761-a516-cbd3d811a949-additional-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 13:49:56 crc kubenswrapper[4861]: I1003 13:49:56.521603 4861 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/7536c672-a720-4761-a516-cbd3d811a949-var-log-ovn\") on node \"crc\" DevicePath \"\"" Oct 03 13:49:56 crc kubenswrapper[4861]: I1003 13:49:56.521612 4861 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/7536c672-a720-4761-a516-cbd3d811a949-var-run\") on node \"crc\" DevicePath \"\"" Oct 03 13:49:56 crc kubenswrapper[4861]: I1003 13:49:56.537666 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7536c672-a720-4761-a516-cbd3d811a949-kube-api-access-67cjz" (OuterVolumeSpecName: "kube-api-access-67cjz") pod "7536c672-a720-4761-a516-cbd3d811a949" (UID: "7536c672-a720-4761-a516-cbd3d811a949"). InnerVolumeSpecName "kube-api-access-67cjz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:49:56 crc kubenswrapper[4861]: I1003 13:49:56.622983 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gxhfq\" (UniqueName: \"kubernetes.io/projected/4be391e0-99d9-4191-95f7-416e9e893b30-kube-api-access-gxhfq\") pod \"glance-a1af-account-create-r7n7q\" (UID: \"4be391e0-99d9-4191-95f7-416e9e893b30\") " pod="openstack/glance-a1af-account-create-r7n7q" Oct 03 13:49:56 crc kubenswrapper[4861]: I1003 13:49:56.623531 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-67cjz\" (UniqueName: \"kubernetes.io/projected/7536c672-a720-4761-a516-cbd3d811a949-kube-api-access-67cjz\") on node \"crc\" DevicePath \"\"" Oct 03 13:49:56 crc kubenswrapper[4861]: I1003 13:49:56.623566 4861 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7536c672-a720-4761-a516-cbd3d811a949-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 13:49:56 crc kubenswrapper[4861]: I1003 13:49:56.643807 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gxhfq\" (UniqueName: \"kubernetes.io/projected/4be391e0-99d9-4191-95f7-416e9e893b30-kube-api-access-gxhfq\") pod \"glance-a1af-account-create-r7n7q\" (UID: \"4be391e0-99d9-4191-95f7-416e9e893b30\") " pod="openstack/glance-a1af-account-create-r7n7q" Oct 03 13:49:56 crc kubenswrapper[4861]: I1003 13:49:56.738037 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-8a29-account-create-qtclr"] Oct 03 13:49:56 crc kubenswrapper[4861]: W1003 13:49:56.758424 4861 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod900785d8_c3f4_4ba6_a925_56165ba86a6d.slice/crio-d7863b8dfc2cd8781e9706b2e990f6da02031945bd3c59aeede132a8bd4ddf1b WatchSource:0}: Error finding container d7863b8dfc2cd8781e9706b2e990f6da02031945bd3c59aeede132a8bd4ddf1b: Status 404 returned error can't find the container with id d7863b8dfc2cd8781e9706b2e990f6da02031945bd3c59aeede132a8bd4ddf1b Oct 03 13:49:56 crc kubenswrapper[4861]: I1003 13:49:56.801324 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-a1af-account-create-r7n7q" Oct 03 13:49:56 crc kubenswrapper[4861]: I1003 13:49:56.969682 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-119e-account-create-jtptm"] Oct 03 13:49:57 crc kubenswrapper[4861]: W1003 13:49:57.021702 4861 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda769cb74_01d8_4a40_8a92_4cc15f1d418a.slice/crio-734b23980f5d03cfc31d622b02af1adf8dcb71a558e6db81bd9ed17e74d86922 WatchSource:0}: Error finding container 734b23980f5d03cfc31d622b02af1adf8dcb71a558e6db81bd9ed17e74d86922: Status 404 returned error can't find the container with id 734b23980f5d03cfc31d622b02af1adf8dcb71a558e6db81bd9ed17e74d86922 Oct 03 13:49:57 crc kubenswrapper[4861]: W1003 13:49:57.411294 4861 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4be391e0_99d9_4191_95f7_416e9e893b30.slice/crio-077791390a3b8e6f82813ba0babf04d1132ec2b2fa001d8e6e89856f1c417ad8 WatchSource:0}: Error finding container 077791390a3b8e6f82813ba0babf04d1132ec2b2fa001d8e6e89856f1c417ad8: Status 404 returned error can't find the container with id 077791390a3b8e6f82813ba0babf04d1132ec2b2fa001d8e6e89856f1c417ad8 Oct 03 13:49:57 crc kubenswrapper[4861]: I1003 13:49:57.413404 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-a1af-account-create-r7n7q"] Oct 03 13:49:57 crc kubenswrapper[4861]: I1003 13:49:57.458770 4861 generic.go:334] "Generic (PLEG): container finished" podID="900785d8-c3f4-4ba6-a925-56165ba86a6d" containerID="ba4b4f89c07fc32412e3361f1e92e19badc5f598fbc66c3466eb6a941467435c" exitCode=0 Oct 03 13:49:57 crc kubenswrapper[4861]: I1003 13:49:57.458894 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-8a29-account-create-qtclr" event={"ID":"900785d8-c3f4-4ba6-a925-56165ba86a6d","Type":"ContainerDied","Data":"ba4b4f89c07fc32412e3361f1e92e19badc5f598fbc66c3466eb6a941467435c"} Oct 03 13:49:57 crc kubenswrapper[4861]: I1003 13:49:57.458927 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-8a29-account-create-qtclr" event={"ID":"900785d8-c3f4-4ba6-a925-56165ba86a6d","Type":"ContainerStarted","Data":"d7863b8dfc2cd8781e9706b2e990f6da02031945bd3c59aeede132a8bd4ddf1b"} Oct 03 13:49:57 crc kubenswrapper[4861]: I1003 13:49:57.462465 4861 generic.go:334] "Generic (PLEG): container finished" podID="1a715a54-ad54-4756-aa5c-32ac4cc2e1f4" containerID="f8d2efc99fc79879962fc1bd0c0c703a2f8c52aa7de6832c75cb14b66b64ff12" exitCode=0 Oct 03 13:49:57 crc kubenswrapper[4861]: I1003 13:49:57.462535 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-7jdcl" event={"ID":"1a715a54-ad54-4756-aa5c-32ac4cc2e1f4","Type":"ContainerDied","Data":"f8d2efc99fc79879962fc1bd0c0c703a2f8c52aa7de6832c75cb14b66b64ff12"} Oct 03 13:49:57 crc kubenswrapper[4861]: I1003 13:49:57.478058 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"532954b7-a9d5-4ddb-87af-b17408a5db8b","Type":"ContainerStarted","Data":"142432be997dff0c86fd0a984e8c953e3730ad73bc4d535a3465dcd16f3e6e32"} Oct 03 13:49:57 crc kubenswrapper[4861]: I1003 13:49:57.478126 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" 
event={"ID":"532954b7-a9d5-4ddb-87af-b17408a5db8b","Type":"ContainerStarted","Data":"4f9361ec384ee120f73661153f0dd262d0617f5d00449876e491a777332b72ce"} Oct 03 13:49:57 crc kubenswrapper[4861]: I1003 13:49:57.478165 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"532954b7-a9d5-4ddb-87af-b17408a5db8b","Type":"ContainerStarted","Data":"1f089eab34fd2291d9c6e7b11bf784e51f24e63d01657332bf61017cb3f34a83"} Oct 03 13:49:57 crc kubenswrapper[4861]: I1003 13:49:57.480179 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-119e-account-create-jtptm" event={"ID":"a769cb74-01d8-4a40-8a92-4cc15f1d418a","Type":"ContainerStarted","Data":"5e728610a658b78264dfefb74148a39a877d4c44dedf122ee62143cfe39e8b7d"} Oct 03 13:49:57 crc kubenswrapper[4861]: I1003 13:49:57.480204 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-119e-account-create-jtptm" event={"ID":"a769cb74-01d8-4a40-8a92-4cc15f1d418a","Type":"ContainerStarted","Data":"734b23980f5d03cfc31d622b02af1adf8dcb71a558e6db81bd9ed17e74d86922"} Oct 03 13:49:57 crc kubenswrapper[4861]: I1003 13:49:57.481120 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-a1af-account-create-r7n7q" event={"ID":"4be391e0-99d9-4191-95f7-416e9e893b30","Type":"ContainerStarted","Data":"077791390a3b8e6f82813ba0babf04d1132ec2b2fa001d8e6e89856f1c417ad8"} Oct 03 13:49:57 crc kubenswrapper[4861]: I1003 13:49:57.482146 4861 generic.go:334] "Generic (PLEG): container finished" podID="f2641015-4563-45ac-bfa5-e19fcc791806" containerID="d52e054065d56b47da01e205478c11cd4629fde413f4de1ac80393ae839fa96d" exitCode=0 Oct 03 13:49:57 crc kubenswrapper[4861]: I1003 13:49:57.482186 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-lh2dn" event={"ID":"f2641015-4563-45ac-bfa5-e19fcc791806","Type":"ContainerDied","Data":"d52e054065d56b47da01e205478c11cd4629fde413f4de1ac80393ae839fa96d"} Oct 03 13:49:57 crc kubenswrapper[4861]: I1003 13:49:57.492839 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-x5wkc-config-4bwlt" Oct 03 13:49:57 crc kubenswrapper[4861]: I1003 13:49:57.493390 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-x5wkc-config-4bwlt" event={"ID":"7536c672-a720-4761-a516-cbd3d811a949","Type":"ContainerDied","Data":"f087547e51d0143728beb15dd7d52f74226a73082763383f3bc79ac17e36f550"} Oct 03 13:49:57 crc kubenswrapper[4861]: I1003 13:49:57.493429 4861 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f087547e51d0143728beb15dd7d52f74226a73082763383f3bc79ac17e36f550" Oct 03 13:49:57 crc kubenswrapper[4861]: I1003 13:49:57.512832 4861 generic.go:334] "Generic (PLEG): container finished" podID="6f60eab9-bc53-4007-bc7e-261845584dae" containerID="b274a087c1406dd905bded959a2774284b72781b5a9b6e4dc14bd8aa513e5d38" exitCode=0 Oct 03 13:49:57 crc kubenswrapper[4861]: I1003 13:49:57.513188 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-4gn45" event={"ID":"6f60eab9-bc53-4007-bc7e-261845584dae","Type":"ContainerDied","Data":"b274a087c1406dd905bded959a2774284b72781b5a9b6e4dc14bd8aa513e5d38"} Oct 03 13:49:57 crc kubenswrapper[4861]: I1003 13:49:57.572343 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-storage-0" podStartSLOduration=21.294308533 podStartE2EDuration="29.572323605s" podCreationTimestamp="2025-10-03 13:49:28 +0000 UTC" firstStartedPulling="2025-10-03 13:49:45.764501432 +0000 UTC m=+1099.762486479" lastFinishedPulling="2025-10-03 13:49:54.042516494 +0000 UTC m=+1108.040501551" observedRunningTime="2025-10-03 13:49:57.56073814 +0000 UTC m=+1111.558723207" watchObservedRunningTime="2025-10-03 13:49:57.572323605 +0000 UTC m=+1111.570308652" Oct 03 13:49:57 crc kubenswrapper[4861]: I1003 13:49:57.597803 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-x5wkc-config-4bwlt"] Oct 03 13:49:57 crc kubenswrapper[4861]: I1003 13:49:57.608485 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-x5wkc-config-4bwlt"] Oct 03 13:49:57 crc kubenswrapper[4861]: I1003 13:49:57.894812 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-77585f5f8c-wv5dm"] Oct 03 13:49:57 crc kubenswrapper[4861]: E1003 13:49:57.895120 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7536c672-a720-4761-a516-cbd3d811a949" containerName="ovn-config" Oct 03 13:49:57 crc kubenswrapper[4861]: I1003 13:49:57.895137 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="7536c672-a720-4761-a516-cbd3d811a949" containerName="ovn-config" Oct 03 13:49:57 crc kubenswrapper[4861]: I1003 13:49:57.900941 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="7536c672-a720-4761-a516-cbd3d811a949" containerName="ovn-config" Oct 03 13:49:57 crc kubenswrapper[4861]: I1003 13:49:57.902193 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-77585f5f8c-wv5dm" Oct 03 13:49:57 crc kubenswrapper[4861]: I1003 13:49:57.905696 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-swift-storage-0" Oct 03 13:49:57 crc kubenswrapper[4861]: I1003 13:49:57.933861 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-lh2dn" Oct 03 13:49:57 crc kubenswrapper[4861]: I1003 13:49:57.935836 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-77585f5f8c-wv5dm"] Oct 03 13:49:57 crc kubenswrapper[4861]: I1003 13:49:57.958744 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lhzmk\" (UniqueName: \"kubernetes.io/projected/f2641015-4563-45ac-bfa5-e19fcc791806-kube-api-access-lhzmk\") pod \"f2641015-4563-45ac-bfa5-e19fcc791806\" (UID: \"f2641015-4563-45ac-bfa5-e19fcc791806\") " Oct 03 13:49:57 crc kubenswrapper[4861]: I1003 13:49:57.959024 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/658e6aab-e882-407b-bd13-1c293b326996-ovsdbserver-nb\") pod \"dnsmasq-dns-77585f5f8c-wv5dm\" (UID: \"658e6aab-e882-407b-bd13-1c293b326996\") " pod="openstack/dnsmasq-dns-77585f5f8c-wv5dm" Oct 03 13:49:57 crc kubenswrapper[4861]: I1003 13:49:57.959071 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/658e6aab-e882-407b-bd13-1c293b326996-dns-svc\") pod \"dnsmasq-dns-77585f5f8c-wv5dm\" (UID: \"658e6aab-e882-407b-bd13-1c293b326996\") " pod="openstack/dnsmasq-dns-77585f5f8c-wv5dm" Oct 03 13:49:57 crc kubenswrapper[4861]: I1003 13:49:57.959106 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/658e6aab-e882-407b-bd13-1c293b326996-config\") pod \"dnsmasq-dns-77585f5f8c-wv5dm\" (UID: \"658e6aab-e882-407b-bd13-1c293b326996\") " pod="openstack/dnsmasq-dns-77585f5f8c-wv5dm" Oct 03 13:49:57 crc kubenswrapper[4861]: I1003 13:49:57.959140 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p869c\" (UniqueName: \"kubernetes.io/projected/658e6aab-e882-407b-bd13-1c293b326996-kube-api-access-p869c\") pod \"dnsmasq-dns-77585f5f8c-wv5dm\" (UID: \"658e6aab-e882-407b-bd13-1c293b326996\") " pod="openstack/dnsmasq-dns-77585f5f8c-wv5dm" Oct 03 13:49:57 crc kubenswrapper[4861]: I1003 13:49:57.959168 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/658e6aab-e882-407b-bd13-1c293b326996-dns-swift-storage-0\") pod \"dnsmasq-dns-77585f5f8c-wv5dm\" (UID: \"658e6aab-e882-407b-bd13-1c293b326996\") " pod="openstack/dnsmasq-dns-77585f5f8c-wv5dm" Oct 03 13:49:57 crc kubenswrapper[4861]: I1003 13:49:57.959188 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/658e6aab-e882-407b-bd13-1c293b326996-ovsdbserver-sb\") pod \"dnsmasq-dns-77585f5f8c-wv5dm\" (UID: \"658e6aab-e882-407b-bd13-1c293b326996\") " pod="openstack/dnsmasq-dns-77585f5f8c-wv5dm" Oct 03 13:49:57 crc kubenswrapper[4861]: I1003 13:49:57.966463 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f2641015-4563-45ac-bfa5-e19fcc791806-kube-api-access-lhzmk" (OuterVolumeSpecName: "kube-api-access-lhzmk") pod "f2641015-4563-45ac-bfa5-e19fcc791806" (UID: "f2641015-4563-45ac-bfa5-e19fcc791806"). InnerVolumeSpecName "kube-api-access-lhzmk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:49:58 crc kubenswrapper[4861]: I1003 13:49:58.060275 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/658e6aab-e882-407b-bd13-1c293b326996-dns-svc\") pod \"dnsmasq-dns-77585f5f8c-wv5dm\" (UID: \"658e6aab-e882-407b-bd13-1c293b326996\") " pod="openstack/dnsmasq-dns-77585f5f8c-wv5dm" Oct 03 13:49:58 crc kubenswrapper[4861]: I1003 13:49:58.060353 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/658e6aab-e882-407b-bd13-1c293b326996-config\") pod \"dnsmasq-dns-77585f5f8c-wv5dm\" (UID: \"658e6aab-e882-407b-bd13-1c293b326996\") " pod="openstack/dnsmasq-dns-77585f5f8c-wv5dm" Oct 03 13:49:58 crc kubenswrapper[4861]: I1003 13:49:58.060408 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p869c\" (UniqueName: \"kubernetes.io/projected/658e6aab-e882-407b-bd13-1c293b326996-kube-api-access-p869c\") pod \"dnsmasq-dns-77585f5f8c-wv5dm\" (UID: \"658e6aab-e882-407b-bd13-1c293b326996\") " pod="openstack/dnsmasq-dns-77585f5f8c-wv5dm" Oct 03 13:49:58 crc kubenswrapper[4861]: I1003 13:49:58.060448 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/658e6aab-e882-407b-bd13-1c293b326996-dns-swift-storage-0\") pod \"dnsmasq-dns-77585f5f8c-wv5dm\" (UID: \"658e6aab-e882-407b-bd13-1c293b326996\") " pod="openstack/dnsmasq-dns-77585f5f8c-wv5dm" Oct 03 13:49:58 crc kubenswrapper[4861]: I1003 13:49:58.060475 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/658e6aab-e882-407b-bd13-1c293b326996-ovsdbserver-sb\") pod \"dnsmasq-dns-77585f5f8c-wv5dm\" (UID: \"658e6aab-e882-407b-bd13-1c293b326996\") " pod="openstack/dnsmasq-dns-77585f5f8c-wv5dm" Oct 03 13:49:58 crc kubenswrapper[4861]: I1003 13:49:58.060539 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/658e6aab-e882-407b-bd13-1c293b326996-ovsdbserver-nb\") pod \"dnsmasq-dns-77585f5f8c-wv5dm\" (UID: \"658e6aab-e882-407b-bd13-1c293b326996\") " pod="openstack/dnsmasq-dns-77585f5f8c-wv5dm" Oct 03 13:49:58 crc kubenswrapper[4861]: I1003 13:49:58.060624 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lhzmk\" (UniqueName: \"kubernetes.io/projected/f2641015-4563-45ac-bfa5-e19fcc791806-kube-api-access-lhzmk\") on node \"crc\" DevicePath \"\"" Oct 03 13:49:58 crc kubenswrapper[4861]: I1003 13:49:58.061556 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/658e6aab-e882-407b-bd13-1c293b326996-dns-svc\") pod \"dnsmasq-dns-77585f5f8c-wv5dm\" (UID: \"658e6aab-e882-407b-bd13-1c293b326996\") " pod="openstack/dnsmasq-dns-77585f5f8c-wv5dm" Oct 03 13:49:58 crc kubenswrapper[4861]: I1003 13:49:58.061556 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/658e6aab-e882-407b-bd13-1c293b326996-config\") pod \"dnsmasq-dns-77585f5f8c-wv5dm\" (UID: \"658e6aab-e882-407b-bd13-1c293b326996\") " pod="openstack/dnsmasq-dns-77585f5f8c-wv5dm" Oct 03 13:49:58 crc kubenswrapper[4861]: I1003 13:49:58.061559 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" 
(UniqueName: \"kubernetes.io/configmap/658e6aab-e882-407b-bd13-1c293b326996-ovsdbserver-nb\") pod \"dnsmasq-dns-77585f5f8c-wv5dm\" (UID: \"658e6aab-e882-407b-bd13-1c293b326996\") " pod="openstack/dnsmasq-dns-77585f5f8c-wv5dm" Oct 03 13:49:58 crc kubenswrapper[4861]: I1003 13:49:58.062493 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/658e6aab-e882-407b-bd13-1c293b326996-dns-swift-storage-0\") pod \"dnsmasq-dns-77585f5f8c-wv5dm\" (UID: \"658e6aab-e882-407b-bd13-1c293b326996\") " pod="openstack/dnsmasq-dns-77585f5f8c-wv5dm" Oct 03 13:49:58 crc kubenswrapper[4861]: I1003 13:49:58.062636 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/658e6aab-e882-407b-bd13-1c293b326996-ovsdbserver-sb\") pod \"dnsmasq-dns-77585f5f8c-wv5dm\" (UID: \"658e6aab-e882-407b-bd13-1c293b326996\") " pod="openstack/dnsmasq-dns-77585f5f8c-wv5dm" Oct 03 13:49:58 crc kubenswrapper[4861]: I1003 13:49:58.078979 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p869c\" (UniqueName: \"kubernetes.io/projected/658e6aab-e882-407b-bd13-1c293b326996-kube-api-access-p869c\") pod \"dnsmasq-dns-77585f5f8c-wv5dm\" (UID: \"658e6aab-e882-407b-bd13-1c293b326996\") " pod="openstack/dnsmasq-dns-77585f5f8c-wv5dm" Oct 03 13:49:58 crc kubenswrapper[4861]: I1003 13:49:58.261489 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-77585f5f8c-wv5dm" Oct 03 13:49:58 crc kubenswrapper[4861]: I1003 13:49:58.524171 4861 generic.go:334] "Generic (PLEG): container finished" podID="a769cb74-01d8-4a40-8a92-4cc15f1d418a" containerID="5e728610a658b78264dfefb74148a39a877d4c44dedf122ee62143cfe39e8b7d" exitCode=0 Oct 03 13:49:58 crc kubenswrapper[4861]: I1003 13:49:58.524272 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-119e-account-create-jtptm" event={"ID":"a769cb74-01d8-4a40-8a92-4cc15f1d418a","Type":"ContainerDied","Data":"5e728610a658b78264dfefb74148a39a877d4c44dedf122ee62143cfe39e8b7d"} Oct 03 13:49:58 crc kubenswrapper[4861]: I1003 13:49:58.527754 4861 generic.go:334] "Generic (PLEG): container finished" podID="4be391e0-99d9-4191-95f7-416e9e893b30" containerID="546852f78f46e7728d2b4ef1e06b55e7af4dee473bce7508ecc536170265dbb9" exitCode=0 Oct 03 13:49:58 crc kubenswrapper[4861]: I1003 13:49:58.527811 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-a1af-account-create-r7n7q" event={"ID":"4be391e0-99d9-4191-95f7-416e9e893b30","Type":"ContainerDied","Data":"546852f78f46e7728d2b4ef1e06b55e7af4dee473bce7508ecc536170265dbb9"} Oct 03 13:49:58 crc kubenswrapper[4861]: I1003 13:49:58.532197 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-lh2dn" Oct 03 13:49:58 crc kubenswrapper[4861]: I1003 13:49:58.538971 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-lh2dn" event={"ID":"f2641015-4563-45ac-bfa5-e19fcc791806","Type":"ContainerDied","Data":"e1ef32c1814c388635040a72b4a96163aa12acf43c64a7200baa4a7a345e469f"} Oct 03 13:49:58 crc kubenswrapper[4861]: I1003 13:49:58.539012 4861 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e1ef32c1814c388635040a72b4a96163aa12acf43c64a7200baa4a7a345e469f" Oct 03 13:49:58 crc kubenswrapper[4861]: I1003 13:49:58.693662 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7536c672-a720-4761-a516-cbd3d811a949" path="/var/lib/kubelet/pods/7536c672-a720-4761-a516-cbd3d811a949/volumes" Oct 03 13:49:58 crc kubenswrapper[4861]: I1003 13:49:58.726094 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-77585f5f8c-wv5dm"] Oct 03 13:49:58 crc kubenswrapper[4861]: W1003 13:49:58.756323 4861 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod658e6aab_e882_407b_bd13_1c293b326996.slice/crio-1806d26ed176086f59a48a1c74f6d18318a2648dbaa24de5feceeaa409242240 WatchSource:0}: Error finding container 1806d26ed176086f59a48a1c74f6d18318a2648dbaa24de5feceeaa409242240: Status 404 returned error can't find the container with id 1806d26ed176086f59a48a1c74f6d18318a2648dbaa24de5feceeaa409242240 Oct 03 13:49:58 crc kubenswrapper[4861]: I1003 13:49:58.845778 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-119e-account-create-jtptm" Oct 03 13:49:58 crc kubenswrapper[4861]: I1003 13:49:58.884113 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6qv9q\" (UniqueName: \"kubernetes.io/projected/a769cb74-01d8-4a40-8a92-4cc15f1d418a-kube-api-access-6qv9q\") pod \"a769cb74-01d8-4a40-8a92-4cc15f1d418a\" (UID: \"a769cb74-01d8-4a40-8a92-4cc15f1d418a\") " Oct 03 13:49:58 crc kubenswrapper[4861]: I1003 13:49:58.898274 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a769cb74-01d8-4a40-8a92-4cc15f1d418a-kube-api-access-6qv9q" (OuterVolumeSpecName: "kube-api-access-6qv9q") pod "a769cb74-01d8-4a40-8a92-4cc15f1d418a" (UID: "a769cb74-01d8-4a40-8a92-4cc15f1d418a"). InnerVolumeSpecName "kube-api-access-6qv9q". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:49:59 crc kubenswrapper[4861]: I1003 13:49:58.931481 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-7jdcl" Oct 03 13:49:59 crc kubenswrapper[4861]: I1003 13:49:58.986040 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t5r6s\" (UniqueName: \"kubernetes.io/projected/1a715a54-ad54-4756-aa5c-32ac4cc2e1f4-kube-api-access-t5r6s\") pod \"1a715a54-ad54-4756-aa5c-32ac4cc2e1f4\" (UID: \"1a715a54-ad54-4756-aa5c-32ac4cc2e1f4\") " Oct 03 13:49:59 crc kubenswrapper[4861]: I1003 13:49:58.986497 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6qv9q\" (UniqueName: \"kubernetes.io/projected/a769cb74-01d8-4a40-8a92-4cc15f1d418a-kube-api-access-6qv9q\") on node \"crc\" DevicePath \"\"" Oct 03 13:49:59 crc kubenswrapper[4861]: I1003 13:49:58.996465 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1a715a54-ad54-4756-aa5c-32ac4cc2e1f4-kube-api-access-t5r6s" (OuterVolumeSpecName: "kube-api-access-t5r6s") pod "1a715a54-ad54-4756-aa5c-32ac4cc2e1f4" (UID: "1a715a54-ad54-4756-aa5c-32ac4cc2e1f4"). InnerVolumeSpecName "kube-api-access-t5r6s". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:49:59 crc kubenswrapper[4861]: I1003 13:49:59.088165 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t5r6s\" (UniqueName: \"kubernetes.io/projected/1a715a54-ad54-4756-aa5c-32ac4cc2e1f4-kube-api-access-t5r6s\") on node \"crc\" DevicePath \"\"" Oct 03 13:49:59 crc kubenswrapper[4861]: I1003 13:49:59.195210 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-8a29-account-create-qtclr" Oct 03 13:49:59 crc kubenswrapper[4861]: I1003 13:49:59.195825 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-4gn45" Oct 03 13:49:59 crc kubenswrapper[4861]: I1003 13:49:59.290780 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4pl6t\" (UniqueName: \"kubernetes.io/projected/900785d8-c3f4-4ba6-a925-56165ba86a6d-kube-api-access-4pl6t\") pod \"900785d8-c3f4-4ba6-a925-56165ba86a6d\" (UID: \"900785d8-c3f4-4ba6-a925-56165ba86a6d\") " Oct 03 13:49:59 crc kubenswrapper[4861]: I1003 13:49:59.290967 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-48cfk\" (UniqueName: \"kubernetes.io/projected/6f60eab9-bc53-4007-bc7e-261845584dae-kube-api-access-48cfk\") pod \"6f60eab9-bc53-4007-bc7e-261845584dae\" (UID: \"6f60eab9-bc53-4007-bc7e-261845584dae\") " Oct 03 13:49:59 crc kubenswrapper[4861]: I1003 13:49:59.300442 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6f60eab9-bc53-4007-bc7e-261845584dae-kube-api-access-48cfk" (OuterVolumeSpecName: "kube-api-access-48cfk") pod "6f60eab9-bc53-4007-bc7e-261845584dae" (UID: "6f60eab9-bc53-4007-bc7e-261845584dae"). InnerVolumeSpecName "kube-api-access-48cfk". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:49:59 crc kubenswrapper[4861]: I1003 13:49:59.302398 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/900785d8-c3f4-4ba6-a925-56165ba86a6d-kube-api-access-4pl6t" (OuterVolumeSpecName: "kube-api-access-4pl6t") pod "900785d8-c3f4-4ba6-a925-56165ba86a6d" (UID: "900785d8-c3f4-4ba6-a925-56165ba86a6d"). InnerVolumeSpecName "kube-api-access-4pl6t". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:49:59 crc kubenswrapper[4861]: I1003 13:49:59.393290 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-48cfk\" (UniqueName: \"kubernetes.io/projected/6f60eab9-bc53-4007-bc7e-261845584dae-kube-api-access-48cfk\") on node \"crc\" DevicePath \"\"" Oct 03 13:49:59 crc kubenswrapper[4861]: I1003 13:49:59.393330 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4pl6t\" (UniqueName: \"kubernetes.io/projected/900785d8-c3f4-4ba6-a925-56165ba86a6d-kube-api-access-4pl6t\") on node \"crc\" DevicePath \"\"" Oct 03 13:49:59 crc kubenswrapper[4861]: I1003 13:49:59.539846 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-119e-account-create-jtptm" event={"ID":"a769cb74-01d8-4a40-8a92-4cc15f1d418a","Type":"ContainerDied","Data":"734b23980f5d03cfc31d622b02af1adf8dcb71a558e6db81bd9ed17e74d86922"} Oct 03 13:49:59 crc kubenswrapper[4861]: I1003 13:49:59.539876 4861 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="734b23980f5d03cfc31d622b02af1adf8dcb71a558e6db81bd9ed17e74d86922" Oct 03 13:49:59 crc kubenswrapper[4861]: I1003 13:49:59.539929 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-119e-account-create-jtptm" Oct 03 13:49:59 crc kubenswrapper[4861]: I1003 13:49:59.543337 4861 generic.go:334] "Generic (PLEG): container finished" podID="658e6aab-e882-407b-bd13-1c293b326996" containerID="9798c1ac4b36142aab68c5310ebbfe2091c1de7e63034ad81ed2b2c7031df9d5" exitCode=0 Oct 03 13:49:59 crc kubenswrapper[4861]: I1003 13:49:59.543430 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77585f5f8c-wv5dm" event={"ID":"658e6aab-e882-407b-bd13-1c293b326996","Type":"ContainerDied","Data":"9798c1ac4b36142aab68c5310ebbfe2091c1de7e63034ad81ed2b2c7031df9d5"} Oct 03 13:49:59 crc kubenswrapper[4861]: I1003 13:49:59.543462 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77585f5f8c-wv5dm" event={"ID":"658e6aab-e882-407b-bd13-1c293b326996","Type":"ContainerStarted","Data":"1806d26ed176086f59a48a1c74f6d18318a2648dbaa24de5feceeaa409242240"} Oct 03 13:49:59 crc kubenswrapper[4861]: I1003 13:49:59.553422 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-4gn45" event={"ID":"6f60eab9-bc53-4007-bc7e-261845584dae","Type":"ContainerDied","Data":"b16c3000f90e99b4a4581e00a0f9333686562f1c9425984f77d6b5a436f4668d"} Oct 03 13:49:59 crc kubenswrapper[4861]: I1003 13:49:59.553451 4861 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b16c3000f90e99b4a4581e00a0f9333686562f1c9425984f77d6b5a436f4668d" Oct 03 13:49:59 crc kubenswrapper[4861]: I1003 13:49:59.553514 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-4gn45" Oct 03 13:49:59 crc kubenswrapper[4861]: I1003 13:49:59.557932 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-8a29-account-create-qtclr" event={"ID":"900785d8-c3f4-4ba6-a925-56165ba86a6d","Type":"ContainerDied","Data":"d7863b8dfc2cd8781e9706b2e990f6da02031945bd3c59aeede132a8bd4ddf1b"} Oct 03 13:49:59 crc kubenswrapper[4861]: I1003 13:49:59.557956 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-8a29-account-create-qtclr" Oct 03 13:49:59 crc kubenswrapper[4861]: I1003 13:49:59.557964 4861 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d7863b8dfc2cd8781e9706b2e990f6da02031945bd3c59aeede132a8bd4ddf1b" Oct 03 13:49:59 crc kubenswrapper[4861]: I1003 13:49:59.559061 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-7jdcl" Oct 03 13:49:59 crc kubenswrapper[4861]: I1003 13:49:59.559430 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-7jdcl" event={"ID":"1a715a54-ad54-4756-aa5c-32ac4cc2e1f4","Type":"ContainerDied","Data":"a88e3223a5eb8129587cf8e323d6e1301f10762d515d92d32daa10a6c02bbbaa"} Oct 03 13:49:59 crc kubenswrapper[4861]: I1003 13:49:59.559475 4861 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a88e3223a5eb8129587cf8e323d6e1301f10762d515d92d32daa10a6c02bbbaa" Oct 03 13:50:00 crc kubenswrapper[4861]: I1003 13:50:00.026981 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-a1af-account-create-r7n7q" Oct 03 13:50:00 crc kubenswrapper[4861]: I1003 13:50:00.119480 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gxhfq\" (UniqueName: \"kubernetes.io/projected/4be391e0-99d9-4191-95f7-416e9e893b30-kube-api-access-gxhfq\") pod \"4be391e0-99d9-4191-95f7-416e9e893b30\" (UID: \"4be391e0-99d9-4191-95f7-416e9e893b30\") " Oct 03 13:50:00 crc kubenswrapper[4861]: I1003 13:50:00.125491 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4be391e0-99d9-4191-95f7-416e9e893b30-kube-api-access-gxhfq" (OuterVolumeSpecName: "kube-api-access-gxhfq") pod "4be391e0-99d9-4191-95f7-416e9e893b30" (UID: "4be391e0-99d9-4191-95f7-416e9e893b30"). InnerVolumeSpecName "kube-api-access-gxhfq". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:50:00 crc kubenswrapper[4861]: I1003 13:50:00.144797 4861 patch_prober.go:28] interesting pod/machine-config-daemon-t9slw container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 13:50:00 crc kubenswrapper[4861]: I1003 13:50:00.144857 4861 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 13:50:00 crc kubenswrapper[4861]: I1003 13:50:00.222722 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gxhfq\" (UniqueName: \"kubernetes.io/projected/4be391e0-99d9-4191-95f7-416e9e893b30-kube-api-access-gxhfq\") on node \"crc\" DevicePath \"\"" Oct 03 13:50:00 crc kubenswrapper[4861]: I1003 13:50:00.567785 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77585f5f8c-wv5dm" event={"ID":"658e6aab-e882-407b-bd13-1c293b326996","Type":"ContainerStarted","Data":"fb0d4456c25e1544bde81cd98471695bf986955f49d4344a6205e03f8fd21e05"} Oct 03 13:50:00 crc kubenswrapper[4861]: I1003 13:50:00.567908 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-77585f5f8c-wv5dm" Oct 03 13:50:00 crc kubenswrapper[4861]: I1003 13:50:00.571913 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-a1af-account-create-r7n7q" event={"ID":"4be391e0-99d9-4191-95f7-416e9e893b30","Type":"ContainerDied","Data":"077791390a3b8e6f82813ba0babf04d1132ec2b2fa001d8e6e89856f1c417ad8"} Oct 03 13:50:00 crc kubenswrapper[4861]: I1003 13:50:00.571951 4861 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="077791390a3b8e6f82813ba0babf04d1132ec2b2fa001d8e6e89856f1c417ad8" Oct 03 13:50:00 crc kubenswrapper[4861]: I1003 13:50:00.572007 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-a1af-account-create-r7n7q" Oct 03 13:50:00 crc kubenswrapper[4861]: I1003 13:50:00.595666 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-77585f5f8c-wv5dm" podStartSLOduration=3.595649413 podStartE2EDuration="3.595649413s" podCreationTimestamp="2025-10-03 13:49:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:50:00.593062607 +0000 UTC m=+1114.591047654" watchObservedRunningTime="2025-10-03 13:50:00.595649413 +0000 UTC m=+1114.593634470" Oct 03 13:50:01 crc kubenswrapper[4861]: I1003 13:50:01.279839 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-gnswn"] Oct 03 13:50:01 crc kubenswrapper[4861]: E1003 13:50:01.280568 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="900785d8-c3f4-4ba6-a925-56165ba86a6d" containerName="mariadb-account-create" Oct 03 13:50:01 crc kubenswrapper[4861]: I1003 13:50:01.280586 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="900785d8-c3f4-4ba6-a925-56165ba86a6d" containerName="mariadb-account-create" Oct 03 13:50:01 crc kubenswrapper[4861]: E1003 13:50:01.280603 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1a715a54-ad54-4756-aa5c-32ac4cc2e1f4" containerName="mariadb-database-create" Oct 03 13:50:01 crc kubenswrapper[4861]: I1003 13:50:01.280612 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="1a715a54-ad54-4756-aa5c-32ac4cc2e1f4" containerName="mariadb-database-create" Oct 03 13:50:01 crc kubenswrapper[4861]: E1003 13:50:01.280630 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a769cb74-01d8-4a40-8a92-4cc15f1d418a" containerName="mariadb-account-create" Oct 03 13:50:01 crc kubenswrapper[4861]: I1003 13:50:01.280637 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="a769cb74-01d8-4a40-8a92-4cc15f1d418a" containerName="mariadb-account-create" Oct 03 13:50:01 crc kubenswrapper[4861]: E1003 13:50:01.280652 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f2641015-4563-45ac-bfa5-e19fcc791806" containerName="mariadb-database-create" Oct 03 13:50:01 crc kubenswrapper[4861]: I1003 13:50:01.280659 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="f2641015-4563-45ac-bfa5-e19fcc791806" containerName="mariadb-database-create" Oct 03 13:50:01 crc kubenswrapper[4861]: E1003 13:50:01.280673 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6f60eab9-bc53-4007-bc7e-261845584dae" containerName="mariadb-database-create" Oct 03 13:50:01 crc kubenswrapper[4861]: I1003 13:50:01.280681 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="6f60eab9-bc53-4007-bc7e-261845584dae" containerName="mariadb-database-create" Oct 03 13:50:01 crc kubenswrapper[4861]: E1003 13:50:01.280702 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4be391e0-99d9-4191-95f7-416e9e893b30" containerName="mariadb-account-create" Oct 03 13:50:01 crc kubenswrapper[4861]: I1003 13:50:01.280709 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="4be391e0-99d9-4191-95f7-416e9e893b30" containerName="mariadb-account-create" Oct 03 13:50:01 crc kubenswrapper[4861]: I1003 13:50:01.280897 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="1a715a54-ad54-4756-aa5c-32ac4cc2e1f4" containerName="mariadb-database-create" Oct 03 13:50:01 crc kubenswrapper[4861]: I1003 13:50:01.280915 4861 
memory_manager.go:354] "RemoveStaleState removing state" podUID="f2641015-4563-45ac-bfa5-e19fcc791806" containerName="mariadb-database-create" Oct 03 13:50:01 crc kubenswrapper[4861]: I1003 13:50:01.280934 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="6f60eab9-bc53-4007-bc7e-261845584dae" containerName="mariadb-database-create" Oct 03 13:50:01 crc kubenswrapper[4861]: I1003 13:50:01.280951 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="4be391e0-99d9-4191-95f7-416e9e893b30" containerName="mariadb-account-create" Oct 03 13:50:01 crc kubenswrapper[4861]: I1003 13:50:01.280967 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="900785d8-c3f4-4ba6-a925-56165ba86a6d" containerName="mariadb-account-create" Oct 03 13:50:01 crc kubenswrapper[4861]: I1003 13:50:01.280975 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="a769cb74-01d8-4a40-8a92-4cc15f1d418a" containerName="mariadb-account-create" Oct 03 13:50:01 crc kubenswrapper[4861]: I1003 13:50:01.281654 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-gnswn" Oct 03 13:50:01 crc kubenswrapper[4861]: I1003 13:50:01.283954 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Oct 03 13:50:01 crc kubenswrapper[4861]: I1003 13:50:01.284164 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Oct 03 13:50:01 crc kubenswrapper[4861]: I1003 13:50:01.284431 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-x97lk" Oct 03 13:50:01 crc kubenswrapper[4861]: I1003 13:50:01.284489 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Oct 03 13:50:01 crc kubenswrapper[4861]: I1003 13:50:01.297473 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-gnswn"] Oct 03 13:50:01 crc kubenswrapper[4861]: I1003 13:50:01.302074 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-x5wkc" Oct 03 13:50:01 crc kubenswrapper[4861]: I1003 13:50:01.345363 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9041b332-7c62-48b3-827e-e74f29984d35-config-data\") pod \"keystone-db-sync-gnswn\" (UID: \"9041b332-7c62-48b3-827e-e74f29984d35\") " pod="openstack/keystone-db-sync-gnswn" Oct 03 13:50:01 crc kubenswrapper[4861]: I1003 13:50:01.345423 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f9zbn\" (UniqueName: \"kubernetes.io/projected/9041b332-7c62-48b3-827e-e74f29984d35-kube-api-access-f9zbn\") pod \"keystone-db-sync-gnswn\" (UID: \"9041b332-7c62-48b3-827e-e74f29984d35\") " pod="openstack/keystone-db-sync-gnswn" Oct 03 13:50:01 crc kubenswrapper[4861]: I1003 13:50:01.345697 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9041b332-7c62-48b3-827e-e74f29984d35-combined-ca-bundle\") pod \"keystone-db-sync-gnswn\" (UID: \"9041b332-7c62-48b3-827e-e74f29984d35\") " pod="openstack/keystone-db-sync-gnswn" Oct 03 13:50:01 crc kubenswrapper[4861]: I1003 13:50:01.447634 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/9041b332-7c62-48b3-827e-e74f29984d35-config-data\") pod \"keystone-db-sync-gnswn\" (UID: \"9041b332-7c62-48b3-827e-e74f29984d35\") " pod="openstack/keystone-db-sync-gnswn" Oct 03 13:50:01 crc kubenswrapper[4861]: I1003 13:50:01.447687 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f9zbn\" (UniqueName: \"kubernetes.io/projected/9041b332-7c62-48b3-827e-e74f29984d35-kube-api-access-f9zbn\") pod \"keystone-db-sync-gnswn\" (UID: \"9041b332-7c62-48b3-827e-e74f29984d35\") " pod="openstack/keystone-db-sync-gnswn" Oct 03 13:50:01 crc kubenswrapper[4861]: I1003 13:50:01.447785 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9041b332-7c62-48b3-827e-e74f29984d35-combined-ca-bundle\") pod \"keystone-db-sync-gnswn\" (UID: \"9041b332-7c62-48b3-827e-e74f29984d35\") " pod="openstack/keystone-db-sync-gnswn" Oct 03 13:50:01 crc kubenswrapper[4861]: I1003 13:50:01.453825 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9041b332-7c62-48b3-827e-e74f29984d35-combined-ca-bundle\") pod \"keystone-db-sync-gnswn\" (UID: \"9041b332-7c62-48b3-827e-e74f29984d35\") " pod="openstack/keystone-db-sync-gnswn" Oct 03 13:50:01 crc kubenswrapper[4861]: I1003 13:50:01.456978 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9041b332-7c62-48b3-827e-e74f29984d35-config-data\") pod \"keystone-db-sync-gnswn\" (UID: \"9041b332-7c62-48b3-827e-e74f29984d35\") " pod="openstack/keystone-db-sync-gnswn" Oct 03 13:50:01 crc kubenswrapper[4861]: I1003 13:50:01.469828 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f9zbn\" (UniqueName: \"kubernetes.io/projected/9041b332-7c62-48b3-827e-e74f29984d35-kube-api-access-f9zbn\") pod \"keystone-db-sync-gnswn\" (UID: \"9041b332-7c62-48b3-827e-e74f29984d35\") " pod="openstack/keystone-db-sync-gnswn" Oct 03 13:50:01 crc kubenswrapper[4861]: I1003 13:50:01.478511 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-fb9qp"] Oct 03 13:50:01 crc kubenswrapper[4861]: I1003 13:50:01.479512 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-fb9qp" Oct 03 13:50:01 crc kubenswrapper[4861]: I1003 13:50:01.481877 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data" Oct 03 13:50:01 crc kubenswrapper[4861]: I1003 13:50:01.482214 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-whl4w" Oct 03 13:50:01 crc kubenswrapper[4861]: I1003 13:50:01.535350 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-fb9qp"] Oct 03 13:50:01 crc kubenswrapper[4861]: I1003 13:50:01.549543 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qvww6\" (UniqueName: \"kubernetes.io/projected/caf4725e-be55-4527-b7b6-3be4e6e1999d-kube-api-access-qvww6\") pod \"glance-db-sync-fb9qp\" (UID: \"caf4725e-be55-4527-b7b6-3be4e6e1999d\") " pod="openstack/glance-db-sync-fb9qp" Oct 03 13:50:01 crc kubenswrapper[4861]: I1003 13:50:01.549600 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/caf4725e-be55-4527-b7b6-3be4e6e1999d-db-sync-config-data\") pod \"glance-db-sync-fb9qp\" (UID: \"caf4725e-be55-4527-b7b6-3be4e6e1999d\") " pod="openstack/glance-db-sync-fb9qp" Oct 03 13:50:01 crc kubenswrapper[4861]: I1003 13:50:01.549633 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/caf4725e-be55-4527-b7b6-3be4e6e1999d-config-data\") pod \"glance-db-sync-fb9qp\" (UID: \"caf4725e-be55-4527-b7b6-3be4e6e1999d\") " pod="openstack/glance-db-sync-fb9qp" Oct 03 13:50:01 crc kubenswrapper[4861]: I1003 13:50:01.549687 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/caf4725e-be55-4527-b7b6-3be4e6e1999d-combined-ca-bundle\") pod \"glance-db-sync-fb9qp\" (UID: \"caf4725e-be55-4527-b7b6-3be4e6e1999d\") " pod="openstack/glance-db-sync-fb9qp" Oct 03 13:50:01 crc kubenswrapper[4861]: I1003 13:50:01.600220 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-gnswn" Oct 03 13:50:01 crc kubenswrapper[4861]: I1003 13:50:01.650585 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/caf4725e-be55-4527-b7b6-3be4e6e1999d-db-sync-config-data\") pod \"glance-db-sync-fb9qp\" (UID: \"caf4725e-be55-4527-b7b6-3be4e6e1999d\") " pod="openstack/glance-db-sync-fb9qp" Oct 03 13:50:01 crc kubenswrapper[4861]: I1003 13:50:01.650943 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/caf4725e-be55-4527-b7b6-3be4e6e1999d-config-data\") pod \"glance-db-sync-fb9qp\" (UID: \"caf4725e-be55-4527-b7b6-3be4e6e1999d\") " pod="openstack/glance-db-sync-fb9qp" Oct 03 13:50:01 crc kubenswrapper[4861]: I1003 13:50:01.651037 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/caf4725e-be55-4527-b7b6-3be4e6e1999d-combined-ca-bundle\") pod \"glance-db-sync-fb9qp\" (UID: \"caf4725e-be55-4527-b7b6-3be4e6e1999d\") " pod="openstack/glance-db-sync-fb9qp" Oct 03 13:50:01 crc kubenswrapper[4861]: I1003 13:50:01.651143 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qvww6\" (UniqueName: \"kubernetes.io/projected/caf4725e-be55-4527-b7b6-3be4e6e1999d-kube-api-access-qvww6\") pod \"glance-db-sync-fb9qp\" (UID: \"caf4725e-be55-4527-b7b6-3be4e6e1999d\") " pod="openstack/glance-db-sync-fb9qp" Oct 03 13:50:01 crc kubenswrapper[4861]: I1003 13:50:01.658132 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/caf4725e-be55-4527-b7b6-3be4e6e1999d-config-data\") pod \"glance-db-sync-fb9qp\" (UID: \"caf4725e-be55-4527-b7b6-3be4e6e1999d\") " pod="openstack/glance-db-sync-fb9qp" Oct 03 13:50:01 crc kubenswrapper[4861]: I1003 13:50:01.658784 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/caf4725e-be55-4527-b7b6-3be4e6e1999d-db-sync-config-data\") pod \"glance-db-sync-fb9qp\" (UID: \"caf4725e-be55-4527-b7b6-3be4e6e1999d\") " pod="openstack/glance-db-sync-fb9qp" Oct 03 13:50:01 crc kubenswrapper[4861]: I1003 13:50:01.664010 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/caf4725e-be55-4527-b7b6-3be4e6e1999d-combined-ca-bundle\") pod \"glance-db-sync-fb9qp\" (UID: \"caf4725e-be55-4527-b7b6-3be4e6e1999d\") " pod="openstack/glance-db-sync-fb9qp" Oct 03 13:50:01 crc kubenswrapper[4861]: I1003 13:50:01.669861 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qvww6\" (UniqueName: \"kubernetes.io/projected/caf4725e-be55-4527-b7b6-3be4e6e1999d-kube-api-access-qvww6\") pod \"glance-db-sync-fb9qp\" (UID: \"caf4725e-be55-4527-b7b6-3be4e6e1999d\") " pod="openstack/glance-db-sync-fb9qp" Oct 03 13:50:01 crc kubenswrapper[4861]: I1003 13:50:01.857985 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-fb9qp" Oct 03 13:50:02 crc kubenswrapper[4861]: I1003 13:50:02.163548 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-gnswn"] Oct 03 13:50:02 crc kubenswrapper[4861]: I1003 13:50:02.383735 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-fb9qp"] Oct 03 13:50:02 crc kubenswrapper[4861]: W1003 13:50:02.388496 4861 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcaf4725e_be55_4527_b7b6_3be4e6e1999d.slice/crio-c7e91d94c9e06bc404b4fbc3a0fb5db1ed68896f52e0bb3fff333bf5386cc32f WatchSource:0}: Error finding container c7e91d94c9e06bc404b4fbc3a0fb5db1ed68896f52e0bb3fff333bf5386cc32f: Status 404 returned error can't find the container with id c7e91d94c9e06bc404b4fbc3a0fb5db1ed68896f52e0bb3fff333bf5386cc32f Oct 03 13:50:02 crc kubenswrapper[4861]: I1003 13:50:02.586827 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-gnswn" event={"ID":"9041b332-7c62-48b3-827e-e74f29984d35","Type":"ContainerStarted","Data":"a4d3846018ab7570ad50aaef0ce382ae55916697c5998f50cd0a3342a0e81b3d"} Oct 03 13:50:02 crc kubenswrapper[4861]: I1003 13:50:02.588220 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-fb9qp" event={"ID":"caf4725e-be55-4527-b7b6-3be4e6e1999d","Type":"ContainerStarted","Data":"c7e91d94c9e06bc404b4fbc3a0fb5db1ed68896f52e0bb3fff333bf5386cc32f"} Oct 03 13:50:04 crc kubenswrapper[4861]: I1003 13:50:04.343624 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-dc57-account-create-47dgl"] Oct 03 13:50:04 crc kubenswrapper[4861]: I1003 13:50:04.344974 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-dc57-account-create-47dgl" Oct 03 13:50:04 crc kubenswrapper[4861]: I1003 13:50:04.348402 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret" Oct 03 13:50:04 crc kubenswrapper[4861]: I1003 13:50:04.355532 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-dc57-account-create-47dgl"] Oct 03 13:50:04 crc kubenswrapper[4861]: I1003 13:50:04.401061 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-grmn9\" (UniqueName: \"kubernetes.io/projected/aeebffc3-09a2-4d1b-95e5-abf4ed19fe26-kube-api-access-grmn9\") pod \"cinder-dc57-account-create-47dgl\" (UID: \"aeebffc3-09a2-4d1b-95e5-abf4ed19fe26\") " pod="openstack/cinder-dc57-account-create-47dgl" Oct 03 13:50:04 crc kubenswrapper[4861]: I1003 13:50:04.502487 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-grmn9\" (UniqueName: \"kubernetes.io/projected/aeebffc3-09a2-4d1b-95e5-abf4ed19fe26-kube-api-access-grmn9\") pod \"cinder-dc57-account-create-47dgl\" (UID: \"aeebffc3-09a2-4d1b-95e5-abf4ed19fe26\") " pod="openstack/cinder-dc57-account-create-47dgl" Oct 03 13:50:04 crc kubenswrapper[4861]: I1003 13:50:04.531609 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-grmn9\" (UniqueName: \"kubernetes.io/projected/aeebffc3-09a2-4d1b-95e5-abf4ed19fe26-kube-api-access-grmn9\") pod \"cinder-dc57-account-create-47dgl\" (UID: \"aeebffc3-09a2-4d1b-95e5-abf4ed19fe26\") " pod="openstack/cinder-dc57-account-create-47dgl" Oct 03 13:50:04 crc kubenswrapper[4861]: I1003 13:50:04.675932 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-dc57-account-create-47dgl" Oct 03 13:50:07 crc kubenswrapper[4861]: I1003 13:50:07.499301 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-dc57-account-create-47dgl"] Oct 03 13:50:07 crc kubenswrapper[4861]: I1003 13:50:07.634779 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-gnswn" event={"ID":"9041b332-7c62-48b3-827e-e74f29984d35","Type":"ContainerStarted","Data":"ca883022040a30e299b696339077f40674b6a8363b0c76c2fd8f8e7350acb223"} Oct 03 13:50:07 crc kubenswrapper[4861]: I1003 13:50:07.638350 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-dc57-account-create-47dgl" event={"ID":"aeebffc3-09a2-4d1b-95e5-abf4ed19fe26","Type":"ContainerStarted","Data":"1bdb71380a95c723d0a3c150144ef73bc08b24c8c4449ff46f0fd3706ce9c08f"} Oct 03 13:50:07 crc kubenswrapper[4861]: I1003 13:50:07.638388 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-dc57-account-create-47dgl" event={"ID":"aeebffc3-09a2-4d1b-95e5-abf4ed19fe26","Type":"ContainerStarted","Data":"be5ef7c407c36adff9702f8877ff77e68e78a000bdb9f25e447121a02deda38e"} Oct 03 13:50:07 crc kubenswrapper[4861]: I1003 13:50:07.660057 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-gnswn" podStartSLOduration=1.7338670189999998 podStartE2EDuration="6.660039907s" podCreationTimestamp="2025-10-03 13:50:01 +0000 UTC" firstStartedPulling="2025-10-03 13:50:02.166101554 +0000 UTC m=+1116.164086601" lastFinishedPulling="2025-10-03 13:50:07.092274442 +0000 UTC m=+1121.090259489" observedRunningTime="2025-10-03 13:50:07.654713631 +0000 UTC m=+1121.652698678" watchObservedRunningTime="2025-10-03 13:50:07.660039907 +0000 
UTC m=+1121.658024954" Oct 03 13:50:07 crc kubenswrapper[4861]: I1003 13:50:07.667681 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-dc57-account-create-47dgl" podStartSLOduration=3.667662451 podStartE2EDuration="3.667662451s" podCreationTimestamp="2025-10-03 13:50:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:50:07.666786439 +0000 UTC m=+1121.664771506" watchObservedRunningTime="2025-10-03 13:50:07.667662451 +0000 UTC m=+1121.665647518" Oct 03 13:50:08 crc kubenswrapper[4861]: I1003 13:50:08.263622 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-77585f5f8c-wv5dm" Oct 03 13:50:08 crc kubenswrapper[4861]: I1003 13:50:08.318639 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-698758b865-wv7zw"] Oct 03 13:50:08 crc kubenswrapper[4861]: I1003 13:50:08.319165 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-698758b865-wv7zw" podUID="b558a2b4-cf02-41f6-a03d-b1050708ab99" containerName="dnsmasq-dns" containerID="cri-o://d5ed3b0cabd0b5fec90bc5d63dad187d2a1339d634d276984118f9748bc0047b" gracePeriod=10 Oct 03 13:50:08 crc kubenswrapper[4861]: I1003 13:50:08.648873 4861 generic.go:334] "Generic (PLEG): container finished" podID="aeebffc3-09a2-4d1b-95e5-abf4ed19fe26" containerID="1bdb71380a95c723d0a3c150144ef73bc08b24c8c4449ff46f0fd3706ce9c08f" exitCode=0 Oct 03 13:50:08 crc kubenswrapper[4861]: I1003 13:50:08.648980 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-dc57-account-create-47dgl" event={"ID":"aeebffc3-09a2-4d1b-95e5-abf4ed19fe26","Type":"ContainerDied","Data":"1bdb71380a95c723d0a3c150144ef73bc08b24c8c4449ff46f0fd3706ce9c08f"} Oct 03 13:50:08 crc kubenswrapper[4861]: I1003 13:50:08.652534 4861 generic.go:334] "Generic (PLEG): container finished" podID="b558a2b4-cf02-41f6-a03d-b1050708ab99" containerID="d5ed3b0cabd0b5fec90bc5d63dad187d2a1339d634d276984118f9748bc0047b" exitCode=0 Oct 03 13:50:08 crc kubenswrapper[4861]: I1003 13:50:08.652642 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-wv7zw" event={"ID":"b558a2b4-cf02-41f6-a03d-b1050708ab99","Type":"ContainerDied","Data":"d5ed3b0cabd0b5fec90bc5d63dad187d2a1339d634d276984118f9748bc0047b"} Oct 03 13:50:11 crc kubenswrapper[4861]: I1003 13:50:11.089657 4861 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-698758b865-wv7zw" podUID="b558a2b4-cf02-41f6-a03d-b1050708ab99" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.114:5353: connect: connection refused" Oct 03 13:50:14 crc kubenswrapper[4861]: I1003 13:50:14.486618 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-6c39-account-create-zbrwd"] Oct 03 13:50:14 crc kubenswrapper[4861]: I1003 13:50:14.487870 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-6c39-account-create-zbrwd" Oct 03 13:50:14 crc kubenswrapper[4861]: I1003 13:50:14.490394 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Oct 03 13:50:14 crc kubenswrapper[4861]: I1003 13:50:14.493341 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-6c39-account-create-zbrwd"] Oct 03 13:50:14 crc kubenswrapper[4861]: I1003 13:50:14.584027 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lvscw\" (UniqueName: \"kubernetes.io/projected/b6abe193-a49f-43f6-ada9-814ebc997a0f-kube-api-access-lvscw\") pod \"barbican-6c39-account-create-zbrwd\" (UID: \"b6abe193-a49f-43f6-ada9-814ebc997a0f\") " pod="openstack/barbican-6c39-account-create-zbrwd" Oct 03 13:50:14 crc kubenswrapper[4861]: I1003 13:50:14.676687 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-8da1-account-create-wkd4r"] Oct 03 13:50:14 crc kubenswrapper[4861]: I1003 13:50:14.677794 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-8da1-account-create-wkd4r" Oct 03 13:50:14 crc kubenswrapper[4861]: I1003 13:50:14.682800 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Oct 03 13:50:14 crc kubenswrapper[4861]: I1003 13:50:14.685894 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lvscw\" (UniqueName: \"kubernetes.io/projected/b6abe193-a49f-43f6-ada9-814ebc997a0f-kube-api-access-lvscw\") pod \"barbican-6c39-account-create-zbrwd\" (UID: \"b6abe193-a49f-43f6-ada9-814ebc997a0f\") " pod="openstack/barbican-6c39-account-create-zbrwd" Oct 03 13:50:14 crc kubenswrapper[4861]: I1003 13:50:14.685981 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hjgst\" (UniqueName: \"kubernetes.io/projected/7129a35f-e546-430e-9cf5-e18ad1e429a4-kube-api-access-hjgst\") pod \"neutron-8da1-account-create-wkd4r\" (UID: \"7129a35f-e546-430e-9cf5-e18ad1e429a4\") " pod="openstack/neutron-8da1-account-create-wkd4r" Oct 03 13:50:14 crc kubenswrapper[4861]: I1003 13:50:14.716297 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lvscw\" (UniqueName: \"kubernetes.io/projected/b6abe193-a49f-43f6-ada9-814ebc997a0f-kube-api-access-lvscw\") pod \"barbican-6c39-account-create-zbrwd\" (UID: \"b6abe193-a49f-43f6-ada9-814ebc997a0f\") " pod="openstack/barbican-6c39-account-create-zbrwd" Oct 03 13:50:14 crc kubenswrapper[4861]: I1003 13:50:14.717586 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-8da1-account-create-wkd4r"] Oct 03 13:50:14 crc kubenswrapper[4861]: I1003 13:50:14.787443 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hjgst\" (UniqueName: \"kubernetes.io/projected/7129a35f-e546-430e-9cf5-e18ad1e429a4-kube-api-access-hjgst\") pod \"neutron-8da1-account-create-wkd4r\" (UID: \"7129a35f-e546-430e-9cf5-e18ad1e429a4\") " pod="openstack/neutron-8da1-account-create-wkd4r" Oct 03 13:50:14 crc kubenswrapper[4861]: I1003 13:50:14.807263 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hjgst\" (UniqueName: \"kubernetes.io/projected/7129a35f-e546-430e-9cf5-e18ad1e429a4-kube-api-access-hjgst\") pod \"neutron-8da1-account-create-wkd4r\" (UID: \"7129a35f-e546-430e-9cf5-e18ad1e429a4\") " 
pod="openstack/neutron-8da1-account-create-wkd4r" Oct 03 13:50:14 crc kubenswrapper[4861]: I1003 13:50:14.851433 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-6c39-account-create-zbrwd" Oct 03 13:50:15 crc kubenswrapper[4861]: I1003 13:50:15.002702 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-8da1-account-create-wkd4r" Oct 03 13:50:15 crc kubenswrapper[4861]: I1003 13:50:15.359871 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-dc57-account-create-47dgl" Oct 03 13:50:15 crc kubenswrapper[4861]: I1003 13:50:15.397154 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-grmn9\" (UniqueName: \"kubernetes.io/projected/aeebffc3-09a2-4d1b-95e5-abf4ed19fe26-kube-api-access-grmn9\") pod \"aeebffc3-09a2-4d1b-95e5-abf4ed19fe26\" (UID: \"aeebffc3-09a2-4d1b-95e5-abf4ed19fe26\") " Oct 03 13:50:15 crc kubenswrapper[4861]: I1003 13:50:15.402287 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aeebffc3-09a2-4d1b-95e5-abf4ed19fe26-kube-api-access-grmn9" (OuterVolumeSpecName: "kube-api-access-grmn9") pod "aeebffc3-09a2-4d1b-95e5-abf4ed19fe26" (UID: "aeebffc3-09a2-4d1b-95e5-abf4ed19fe26"). InnerVolumeSpecName "kube-api-access-grmn9". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:50:15 crc kubenswrapper[4861]: I1003 13:50:15.499177 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-grmn9\" (UniqueName: \"kubernetes.io/projected/aeebffc3-09a2-4d1b-95e5-abf4ed19fe26-kube-api-access-grmn9\") on node \"crc\" DevicePath \"\"" Oct 03 13:50:15 crc kubenswrapper[4861]: I1003 13:50:15.512505 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-wv7zw" Oct 03 13:50:15 crc kubenswrapper[4861]: I1003 13:50:15.600948 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b558a2b4-cf02-41f6-a03d-b1050708ab99-dns-svc\") pod \"b558a2b4-cf02-41f6-a03d-b1050708ab99\" (UID: \"b558a2b4-cf02-41f6-a03d-b1050708ab99\") " Oct 03 13:50:15 crc kubenswrapper[4861]: I1003 13:50:15.601151 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tfxh8\" (UniqueName: \"kubernetes.io/projected/b558a2b4-cf02-41f6-a03d-b1050708ab99-kube-api-access-tfxh8\") pod \"b558a2b4-cf02-41f6-a03d-b1050708ab99\" (UID: \"b558a2b4-cf02-41f6-a03d-b1050708ab99\") " Oct 03 13:50:15 crc kubenswrapper[4861]: I1003 13:50:15.601263 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b558a2b4-cf02-41f6-a03d-b1050708ab99-config\") pod \"b558a2b4-cf02-41f6-a03d-b1050708ab99\" (UID: \"b558a2b4-cf02-41f6-a03d-b1050708ab99\") " Oct 03 13:50:15 crc kubenswrapper[4861]: I1003 13:50:15.601296 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b558a2b4-cf02-41f6-a03d-b1050708ab99-ovsdbserver-sb\") pod \"b558a2b4-cf02-41f6-a03d-b1050708ab99\" (UID: \"b558a2b4-cf02-41f6-a03d-b1050708ab99\") " Oct 03 13:50:15 crc kubenswrapper[4861]: I1003 13:50:15.601369 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b558a2b4-cf02-41f6-a03d-b1050708ab99-ovsdbserver-nb\") pod \"b558a2b4-cf02-41f6-a03d-b1050708ab99\" (UID: \"b558a2b4-cf02-41f6-a03d-b1050708ab99\") " Oct 03 13:50:15 crc kubenswrapper[4861]: I1003 13:50:15.608023 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b558a2b4-cf02-41f6-a03d-b1050708ab99-kube-api-access-tfxh8" (OuterVolumeSpecName: "kube-api-access-tfxh8") pod "b558a2b4-cf02-41f6-a03d-b1050708ab99" (UID: "b558a2b4-cf02-41f6-a03d-b1050708ab99"). InnerVolumeSpecName "kube-api-access-tfxh8". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:50:15 crc kubenswrapper[4861]: I1003 13:50:15.650736 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b558a2b4-cf02-41f6-a03d-b1050708ab99-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "b558a2b4-cf02-41f6-a03d-b1050708ab99" (UID: "b558a2b4-cf02-41f6-a03d-b1050708ab99"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:50:15 crc kubenswrapper[4861]: I1003 13:50:15.658262 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b558a2b4-cf02-41f6-a03d-b1050708ab99-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "b558a2b4-cf02-41f6-a03d-b1050708ab99" (UID: "b558a2b4-cf02-41f6-a03d-b1050708ab99"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:50:15 crc kubenswrapper[4861]: I1003 13:50:15.664634 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b558a2b4-cf02-41f6-a03d-b1050708ab99-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "b558a2b4-cf02-41f6-a03d-b1050708ab99" (UID: "b558a2b4-cf02-41f6-a03d-b1050708ab99"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:50:15 crc kubenswrapper[4861]: I1003 13:50:15.674008 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b558a2b4-cf02-41f6-a03d-b1050708ab99-config" (OuterVolumeSpecName: "config") pod "b558a2b4-cf02-41f6-a03d-b1050708ab99" (UID: "b558a2b4-cf02-41f6-a03d-b1050708ab99"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:50:15 crc kubenswrapper[4861]: I1003 13:50:15.702776 4861 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b558a2b4-cf02-41f6-a03d-b1050708ab99-config\") on node \"crc\" DevicePath \"\"" Oct 03 13:50:15 crc kubenswrapper[4861]: I1003 13:50:15.702811 4861 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b558a2b4-cf02-41f6-a03d-b1050708ab99-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 03 13:50:15 crc kubenswrapper[4861]: I1003 13:50:15.702823 4861 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b558a2b4-cf02-41f6-a03d-b1050708ab99-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 03 13:50:15 crc kubenswrapper[4861]: I1003 13:50:15.702834 4861 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b558a2b4-cf02-41f6-a03d-b1050708ab99-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 03 13:50:15 crc kubenswrapper[4861]: I1003 13:50:15.702846 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tfxh8\" (UniqueName: \"kubernetes.io/projected/b558a2b4-cf02-41f6-a03d-b1050708ab99-kube-api-access-tfxh8\") on node \"crc\" DevicePath \"\"" Oct 03 13:50:15 crc kubenswrapper[4861]: I1003 13:50:15.734508 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-wv7zw" event={"ID":"b558a2b4-cf02-41f6-a03d-b1050708ab99","Type":"ContainerDied","Data":"298a849d92bacd908c26967c775b1ba09891d19f70012c653747ede513213f02"} Oct 03 13:50:15 crc kubenswrapper[4861]: I1003 13:50:15.734598 4861 scope.go:117] "RemoveContainer" containerID="d5ed3b0cabd0b5fec90bc5d63dad187d2a1339d634d276984118f9748bc0047b" Oct 03 13:50:15 crc kubenswrapper[4861]: I1003 13:50:15.734900 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-wv7zw" Oct 03 13:50:15 crc kubenswrapper[4861]: I1003 13:50:15.762224 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-dc57-account-create-47dgl" event={"ID":"aeebffc3-09a2-4d1b-95e5-abf4ed19fe26","Type":"ContainerDied","Data":"be5ef7c407c36adff9702f8877ff77e68e78a000bdb9f25e447121a02deda38e"} Oct 03 13:50:15 crc kubenswrapper[4861]: I1003 13:50:15.762306 4861 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="be5ef7c407c36adff9702f8877ff77e68e78a000bdb9f25e447121a02deda38e" Oct 03 13:50:15 crc kubenswrapper[4861]: I1003 13:50:15.762429 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-dc57-account-create-47dgl"
Oct 03 13:50:15 crc kubenswrapper[4861]: I1003 13:50:15.805417 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-698758b865-wv7zw"]
Oct 03 13:50:15 crc kubenswrapper[4861]: I1003 13:50:15.813848 4861 scope.go:117] "RemoveContainer" containerID="9d9d62cbc75e1b27c7e788de68763370e681d62bcdcb5be585d79e8f7d417d4d"
Oct 03 13:50:15 crc kubenswrapper[4861]: I1003 13:50:15.820755 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-698758b865-wv7zw"]
Oct 03 13:50:15 crc kubenswrapper[4861]: I1003 13:50:15.845645 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-6c39-account-create-zbrwd"]
Oct 03 13:50:16 crc kubenswrapper[4861]: I1003 13:50:16.008105 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-8da1-account-create-wkd4r"]
Oct 03 13:50:16 crc kubenswrapper[4861]: I1003 13:50:16.693386 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b558a2b4-cf02-41f6-a03d-b1050708ab99" path="/var/lib/kubelet/pods/b558a2b4-cf02-41f6-a03d-b1050708ab99/volumes"
Oct 03 13:50:16 crc kubenswrapper[4861]: I1003 13:50:16.771613 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-fb9qp" event={"ID":"caf4725e-be55-4527-b7b6-3be4e6e1999d","Type":"ContainerStarted","Data":"00b354772bbc010bd66cf6d4c53c4595fe46beb7ceebef3b61b37fb353fb6846"}
Oct 03 13:50:16 crc kubenswrapper[4861]: I1003 13:50:16.777323 4861 generic.go:334] "Generic (PLEG): container finished" podID="b6abe193-a49f-43f6-ada9-814ebc997a0f" containerID="2554a61636729dafd3c60fca77e8f66f6052cbb6c412d3c2306e97fdf30ef074" exitCode=0
Oct 03 13:50:16 crc kubenswrapper[4861]: I1003 13:50:16.777374 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-6c39-account-create-zbrwd" event={"ID":"b6abe193-a49f-43f6-ada9-814ebc997a0f","Type":"ContainerDied","Data":"2554a61636729dafd3c60fca77e8f66f6052cbb6c412d3c2306e97fdf30ef074"}
Oct 03 13:50:16 crc kubenswrapper[4861]: I1003 13:50:16.777395 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-6c39-account-create-zbrwd" event={"ID":"b6abe193-a49f-43f6-ada9-814ebc997a0f","Type":"ContainerStarted","Data":"dfde2e1b266ab1c01af3be3556892a3e91a08b2045e85d1a2abe581d7465e50f"}
Oct 03 13:50:16 crc kubenswrapper[4861]: I1003 13:50:16.778816 4861 generic.go:334] "Generic (PLEG): container finished" podID="9041b332-7c62-48b3-827e-e74f29984d35" containerID="ca883022040a30e299b696339077f40674b6a8363b0c76c2fd8f8e7350acb223" exitCode=0
Oct 03 13:50:16 crc kubenswrapper[4861]: I1003 13:50:16.778860 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-gnswn" event={"ID":"9041b332-7c62-48b3-827e-e74f29984d35","Type":"ContainerDied","Data":"ca883022040a30e299b696339077f40674b6a8363b0c76c2fd8f8e7350acb223"}
Oct 03 13:50:16 crc kubenswrapper[4861]: I1003 13:50:16.780375 4861 generic.go:334] "Generic (PLEG): container finished" podID="7129a35f-e546-430e-9cf5-e18ad1e429a4" containerID="da89576fc34a0d2e3706d450553ac9dc9910cd7ce7c0dfee8451c5d7f3b9037d" exitCode=0
Oct 03 13:50:16 crc kubenswrapper[4861]: I1003 13:50:16.780400 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-8da1-account-create-wkd4r" event={"ID":"7129a35f-e546-430e-9cf5-e18ad1e429a4","Type":"ContainerDied","Data":"da89576fc34a0d2e3706d450553ac9dc9910cd7ce7c0dfee8451c5d7f3b9037d"}
Oct 03 13:50:16 crc kubenswrapper[4861]: I1003 13:50:16.780415 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-8da1-account-create-wkd4r" event={"ID":"7129a35f-e546-430e-9cf5-e18ad1e429a4","Type":"ContainerStarted","Data":"d40a52ac56ce6521c6d2590b4435928a1a8b3b7864c0f2214febd40b572f2d19"}
Oct 03 13:50:16 crc kubenswrapper[4861]: I1003 13:50:16.796433 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-fb9qp" podStartSLOduration=2.837887097 podStartE2EDuration="15.79641486s" podCreationTimestamp="2025-10-03 13:50:01 +0000 UTC" firstStartedPulling="2025-10-03 13:50:02.390689776 +0000 UTC m=+1116.388674823" lastFinishedPulling="2025-10-03 13:50:15.349217539 +0000 UTC m=+1129.347202586" observedRunningTime="2025-10-03 13:50:16.78859183 +0000 UTC m=+1130.786576877" watchObservedRunningTime="2025-10-03 13:50:16.79641486 +0000 UTC m=+1130.794399907"
Oct 03 13:50:18 crc kubenswrapper[4861]: I1003 13:50:18.120925 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-8da1-account-create-wkd4r"
Oct 03 13:50:18 crc kubenswrapper[4861]: I1003 13:50:18.221895 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-6c39-account-create-zbrwd"
Oct 03 13:50:18 crc kubenswrapper[4861]: I1003 13:50:18.240584 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-gnswn"
Oct 03 13:50:18 crc kubenswrapper[4861]: I1003 13:50:18.257588 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hjgst\" (UniqueName: \"kubernetes.io/projected/7129a35f-e546-430e-9cf5-e18ad1e429a4-kube-api-access-hjgst\") pod \"7129a35f-e546-430e-9cf5-e18ad1e429a4\" (UID: \"7129a35f-e546-430e-9cf5-e18ad1e429a4\") "
Oct 03 13:50:18 crc kubenswrapper[4861]: I1003 13:50:18.277134 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7129a35f-e546-430e-9cf5-e18ad1e429a4-kube-api-access-hjgst" (OuterVolumeSpecName: "kube-api-access-hjgst") pod "7129a35f-e546-430e-9cf5-e18ad1e429a4" (UID: "7129a35f-e546-430e-9cf5-e18ad1e429a4"). InnerVolumeSpecName "kube-api-access-hjgst". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 13:50:18 crc kubenswrapper[4861]: I1003 13:50:18.359720 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lvscw\" (UniqueName: \"kubernetes.io/projected/b6abe193-a49f-43f6-ada9-814ebc997a0f-kube-api-access-lvscw\") pod \"b6abe193-a49f-43f6-ada9-814ebc997a0f\" (UID: \"b6abe193-a49f-43f6-ada9-814ebc997a0f\") "
Oct 03 13:50:18 crc kubenswrapper[4861]: I1003 13:50:18.360288 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f9zbn\" (UniqueName: \"kubernetes.io/projected/9041b332-7c62-48b3-827e-e74f29984d35-kube-api-access-f9zbn\") pod \"9041b332-7c62-48b3-827e-e74f29984d35\" (UID: \"9041b332-7c62-48b3-827e-e74f29984d35\") "
Oct 03 13:50:18 crc kubenswrapper[4861]: I1003 13:50:18.360421 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9041b332-7c62-48b3-827e-e74f29984d35-combined-ca-bundle\") pod \"9041b332-7c62-48b3-827e-e74f29984d35\" (UID: \"9041b332-7c62-48b3-827e-e74f29984d35\") "
Oct 03 13:50:18 crc kubenswrapper[4861]: I1003 13:50:18.360674 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9041b332-7c62-48b3-827e-e74f29984d35-config-data\") pod \"9041b332-7c62-48b3-827e-e74f29984d35\" (UID: \"9041b332-7c62-48b3-827e-e74f29984d35\") "
Oct 03 13:50:18 crc kubenswrapper[4861]: I1003 13:50:18.361358 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hjgst\" (UniqueName: \"kubernetes.io/projected/7129a35f-e546-430e-9cf5-e18ad1e429a4-kube-api-access-hjgst\") on node \"crc\" DevicePath \"\""
Oct 03 13:50:18 crc kubenswrapper[4861]: I1003 13:50:18.363388 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9041b332-7c62-48b3-827e-e74f29984d35-kube-api-access-f9zbn" (OuterVolumeSpecName: "kube-api-access-f9zbn") pod "9041b332-7c62-48b3-827e-e74f29984d35" (UID: "9041b332-7c62-48b3-827e-e74f29984d35"). InnerVolumeSpecName "kube-api-access-f9zbn". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 13:50:18 crc kubenswrapper[4861]: I1003 13:50:18.363761 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6abe193-a49f-43f6-ada9-814ebc997a0f-kube-api-access-lvscw" (OuterVolumeSpecName: "kube-api-access-lvscw") pod "b6abe193-a49f-43f6-ada9-814ebc997a0f" (UID: "b6abe193-a49f-43f6-ada9-814ebc997a0f"). InnerVolumeSpecName "kube-api-access-lvscw". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 13:50:18 crc kubenswrapper[4861]: I1003 13:50:18.386347 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9041b332-7c62-48b3-827e-e74f29984d35-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9041b332-7c62-48b3-827e-e74f29984d35" (UID: "9041b332-7c62-48b3-827e-e74f29984d35"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 13:50:18 crc kubenswrapper[4861]: I1003 13:50:18.403996 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9041b332-7c62-48b3-827e-e74f29984d35-config-data" (OuterVolumeSpecName: "config-data") pod "9041b332-7c62-48b3-827e-e74f29984d35" (UID: "9041b332-7c62-48b3-827e-e74f29984d35"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 13:50:18 crc kubenswrapper[4861]: I1003 13:50:18.462185 4861 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9041b332-7c62-48b3-827e-e74f29984d35-config-data\") on node \"crc\" DevicePath \"\""
Oct 03 13:50:18 crc kubenswrapper[4861]: I1003 13:50:18.462223 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lvscw\" (UniqueName: \"kubernetes.io/projected/b6abe193-a49f-43f6-ada9-814ebc997a0f-kube-api-access-lvscw\") on node \"crc\" DevicePath \"\""
Oct 03 13:50:18 crc kubenswrapper[4861]: I1003 13:50:18.462252 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f9zbn\" (UniqueName: \"kubernetes.io/projected/9041b332-7c62-48b3-827e-e74f29984d35-kube-api-access-f9zbn\") on node \"crc\" DevicePath \"\""
Oct 03 13:50:18 crc kubenswrapper[4861]: I1003 13:50:18.462263 4861 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9041b332-7c62-48b3-827e-e74f29984d35-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 03 13:50:18 crc kubenswrapper[4861]: I1003 13:50:18.795380 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-gnswn" event={"ID":"9041b332-7c62-48b3-827e-e74f29984d35","Type":"ContainerDied","Data":"a4d3846018ab7570ad50aaef0ce382ae55916697c5998f50cd0a3342a0e81b3d"}
Oct 03 13:50:18 crc kubenswrapper[4861]: I1003 13:50:18.795454 4861 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a4d3846018ab7570ad50aaef0ce382ae55916697c5998f50cd0a3342a0e81b3d"
Oct 03 13:50:18 crc kubenswrapper[4861]: I1003 13:50:18.795619 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-gnswn"
Oct 03 13:50:18 crc kubenswrapper[4861]: I1003 13:50:18.797601 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-8da1-account-create-wkd4r" event={"ID":"7129a35f-e546-430e-9cf5-e18ad1e429a4","Type":"ContainerDied","Data":"d40a52ac56ce6521c6d2590b4435928a1a8b3b7864c0f2214febd40b572f2d19"}
Oct 03 13:50:18 crc kubenswrapper[4861]: I1003 13:50:18.797633 4861 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d40a52ac56ce6521c6d2590b4435928a1a8b3b7864c0f2214febd40b572f2d19"
Oct 03 13:50:18 crc kubenswrapper[4861]: I1003 13:50:18.797653 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-8da1-account-create-wkd4r"
Oct 03 13:50:18 crc kubenswrapper[4861]: I1003 13:50:18.799618 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-6c39-account-create-zbrwd" event={"ID":"b6abe193-a49f-43f6-ada9-814ebc997a0f","Type":"ContainerDied","Data":"dfde2e1b266ab1c01af3be3556892a3e91a08b2045e85d1a2abe581d7465e50f"}
Oct 03 13:50:18 crc kubenswrapper[4861]: I1003 13:50:18.799659 4861 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dfde2e1b266ab1c01af3be3556892a3e91a08b2045e85d1a2abe581d7465e50f"
Oct 03 13:50:18 crc kubenswrapper[4861]: I1003 13:50:18.799701 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-6c39-account-create-zbrwd"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.099030 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-55fff446b9-wwcck"]
Oct 03 13:50:19 crc kubenswrapper[4861]: E1003 13:50:19.099815 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9041b332-7c62-48b3-827e-e74f29984d35" containerName="keystone-db-sync"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.099908 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="9041b332-7c62-48b3-827e-e74f29984d35" containerName="keystone-db-sync"
Oct 03 13:50:19 crc kubenswrapper[4861]: E1003 13:50:19.099962 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b558a2b4-cf02-41f6-a03d-b1050708ab99" containerName="init"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.100009 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="b558a2b4-cf02-41f6-a03d-b1050708ab99" containerName="init"
Oct 03 13:50:19 crc kubenswrapper[4861]: E1003 13:50:19.100060 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7129a35f-e546-430e-9cf5-e18ad1e429a4" containerName="mariadb-account-create"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.100105 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="7129a35f-e546-430e-9cf5-e18ad1e429a4" containerName="mariadb-account-create"
Oct 03 13:50:19 crc kubenswrapper[4861]: E1003 13:50:19.100150 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b558a2b4-cf02-41f6-a03d-b1050708ab99" containerName="dnsmasq-dns"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.100200 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="b558a2b4-cf02-41f6-a03d-b1050708ab99" containerName="dnsmasq-dns"
Oct 03 13:50:19 crc kubenswrapper[4861]: E1003 13:50:19.100271 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aeebffc3-09a2-4d1b-95e5-abf4ed19fe26" containerName="mariadb-account-create"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.100323 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="aeebffc3-09a2-4d1b-95e5-abf4ed19fe26" containerName="mariadb-account-create"
Oct 03 13:50:19 crc kubenswrapper[4861]: E1003 13:50:19.100377 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b6abe193-a49f-43f6-ada9-814ebc997a0f" containerName="mariadb-account-create"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.100433 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="b6abe193-a49f-43f6-ada9-814ebc997a0f" containerName="mariadb-account-create"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.100624 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="aeebffc3-09a2-4d1b-95e5-abf4ed19fe26" containerName="mariadb-account-create"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.100872 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="b558a2b4-cf02-41f6-a03d-b1050708ab99" containerName="dnsmasq-dns"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.100931 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="b6abe193-a49f-43f6-ada9-814ebc997a0f" containerName="mariadb-account-create"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.100991 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="7129a35f-e546-430e-9cf5-e18ad1e429a4" containerName="mariadb-account-create"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.101043 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="9041b332-7c62-48b3-827e-e74f29984d35" containerName="keystone-db-sync"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.101891 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-55fff446b9-wwcck"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.139361 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-55fff446b9-wwcck"]
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.177191 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/204dc908-11a2-48d9-980f-c9a9fdc66079-dns-swift-storage-0\") pod \"dnsmasq-dns-55fff446b9-wwcck\" (UID: \"204dc908-11a2-48d9-980f-c9a9fdc66079\") " pod="openstack/dnsmasq-dns-55fff446b9-wwcck"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.177327 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/204dc908-11a2-48d9-980f-c9a9fdc66079-config\") pod \"dnsmasq-dns-55fff446b9-wwcck\" (UID: \"204dc908-11a2-48d9-980f-c9a9fdc66079\") " pod="openstack/dnsmasq-dns-55fff446b9-wwcck"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.177373 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/204dc908-11a2-48d9-980f-c9a9fdc66079-ovsdbserver-nb\") pod \"dnsmasq-dns-55fff446b9-wwcck\" (UID: \"204dc908-11a2-48d9-980f-c9a9fdc66079\") " pod="openstack/dnsmasq-dns-55fff446b9-wwcck"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.177458 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lbl4c\" (UniqueName: \"kubernetes.io/projected/204dc908-11a2-48d9-980f-c9a9fdc66079-kube-api-access-lbl4c\") pod \"dnsmasq-dns-55fff446b9-wwcck\" (UID: \"204dc908-11a2-48d9-980f-c9a9fdc66079\") " pod="openstack/dnsmasq-dns-55fff446b9-wwcck"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.177506 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/204dc908-11a2-48d9-980f-c9a9fdc66079-ovsdbserver-sb\") pod \"dnsmasq-dns-55fff446b9-wwcck\" (UID: \"204dc908-11a2-48d9-980f-c9a9fdc66079\") " pod="openstack/dnsmasq-dns-55fff446b9-wwcck"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.177557 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/204dc908-11a2-48d9-980f-c9a9fdc66079-dns-svc\") pod \"dnsmasq-dns-55fff446b9-wwcck\" (UID: \"204dc908-11a2-48d9-980f-c9a9fdc66079\") " pod="openstack/dnsmasq-dns-55fff446b9-wwcck"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.281424 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/204dc908-11a2-48d9-980f-c9a9fdc66079-ovsdbserver-sb\") pod \"dnsmasq-dns-55fff446b9-wwcck\" (UID: \"204dc908-11a2-48d9-980f-c9a9fdc66079\") " pod="openstack/dnsmasq-dns-55fff446b9-wwcck"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.281718 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/204dc908-11a2-48d9-980f-c9a9fdc66079-dns-svc\") pod \"dnsmasq-dns-55fff446b9-wwcck\" (UID: \"204dc908-11a2-48d9-980f-c9a9fdc66079\") " pod="openstack/dnsmasq-dns-55fff446b9-wwcck"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.281882 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/204dc908-11a2-48d9-980f-c9a9fdc66079-dns-swift-storage-0\") pod \"dnsmasq-dns-55fff446b9-wwcck\" (UID: \"204dc908-11a2-48d9-980f-c9a9fdc66079\") " pod="openstack/dnsmasq-dns-55fff446b9-wwcck"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.282015 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/204dc908-11a2-48d9-980f-c9a9fdc66079-config\") pod \"dnsmasq-dns-55fff446b9-wwcck\" (UID: \"204dc908-11a2-48d9-980f-c9a9fdc66079\") " pod="openstack/dnsmasq-dns-55fff446b9-wwcck"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.282122 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/204dc908-11a2-48d9-980f-c9a9fdc66079-ovsdbserver-nb\") pod \"dnsmasq-dns-55fff446b9-wwcck\" (UID: \"204dc908-11a2-48d9-980f-c9a9fdc66079\") " pod="openstack/dnsmasq-dns-55fff446b9-wwcck"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.282290 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lbl4c\" (UniqueName: \"kubernetes.io/projected/204dc908-11a2-48d9-980f-c9a9fdc66079-kube-api-access-lbl4c\") pod \"dnsmasq-dns-55fff446b9-wwcck\" (UID: \"204dc908-11a2-48d9-980f-c9a9fdc66079\") " pod="openstack/dnsmasq-dns-55fff446b9-wwcck"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.283784 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/204dc908-11a2-48d9-980f-c9a9fdc66079-dns-svc\") pod \"dnsmasq-dns-55fff446b9-wwcck\" (UID: \"204dc908-11a2-48d9-980f-c9a9fdc66079\") " pod="openstack/dnsmasq-dns-55fff446b9-wwcck"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.285027 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/204dc908-11a2-48d9-980f-c9a9fdc66079-config\") pod \"dnsmasq-dns-55fff446b9-wwcck\" (UID: \"204dc908-11a2-48d9-980f-c9a9fdc66079\") " pod="openstack/dnsmasq-dns-55fff446b9-wwcck"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.285601 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/204dc908-11a2-48d9-980f-c9a9fdc66079-ovsdbserver-nb\") pod \"dnsmasq-dns-55fff446b9-wwcck\" (UID: \"204dc908-11a2-48d9-980f-c9a9fdc66079\") " pod="openstack/dnsmasq-dns-55fff446b9-wwcck"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.286135 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/204dc908-11a2-48d9-980f-c9a9fdc66079-ovsdbserver-sb\") pod \"dnsmasq-dns-55fff446b9-wwcck\" (UID: \"204dc908-11a2-48d9-980f-c9a9fdc66079\") " pod="openstack/dnsmasq-dns-55fff446b9-wwcck"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.286303 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/204dc908-11a2-48d9-980f-c9a9fdc66079-dns-swift-storage-0\") pod \"dnsmasq-dns-55fff446b9-wwcck\" (UID: \"204dc908-11a2-48d9-980f-c9a9fdc66079\") " pod="openstack/dnsmasq-dns-55fff446b9-wwcck"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.293397 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-lj9xf"]
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.312028 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-lj9xf"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.331781 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.332013 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.332101 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.332393 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-x97lk"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.332491 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-lj9xf"]
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.334722 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lbl4c\" (UniqueName: \"kubernetes.io/projected/204dc908-11a2-48d9-980f-c9a9fdc66079-kube-api-access-lbl4c\") pod \"dnsmasq-dns-55fff446b9-wwcck\" (UID: \"204dc908-11a2-48d9-980f-c9a9fdc66079\") " pod="openstack/dnsmasq-dns-55fff446b9-wwcck"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.426649 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-55fff446b9-wwcck"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.447125 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-549449d4bc-ffsq9"]
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.457950 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-549449d4bc-ffsq9"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.467211 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-scripts"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.467437 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon-horizon-dockercfg-m47jx"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.467887 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.469514 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-549449d4bc-ffsq9"]
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.479033 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-config-data"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.492375 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/84aa45eb-6ad6-41af-91b9-b6b7b7a43790-logs\") pod \"horizon-549449d4bc-ffsq9\" (UID: \"84aa45eb-6ad6-41af-91b9-b6b7b7a43790\") " pod="openstack/horizon-549449d4bc-ffsq9"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.492571 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/84aa45eb-6ad6-41af-91b9-b6b7b7a43790-horizon-secret-key\") pod \"horizon-549449d4bc-ffsq9\" (UID: \"84aa45eb-6ad6-41af-91b9-b6b7b7a43790\") " pod="openstack/horizon-549449d4bc-ffsq9"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.492608 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/4e178ffa-097e-4f43-a023-dcb3db4f2c6d-credential-keys\") pod \"keystone-bootstrap-lj9xf\" (UID: \"4e178ffa-097e-4f43-a023-dcb3db4f2c6d\") " pod="openstack/keystone-bootstrap-lj9xf"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.492636 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/4e178ffa-097e-4f43-a023-dcb3db4f2c6d-fernet-keys\") pod \"keystone-bootstrap-lj9xf\" (UID: \"4e178ffa-097e-4f43-a023-dcb3db4f2c6d\") " pod="openstack/keystone-bootstrap-lj9xf"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.492670 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7vz4w\" (UniqueName: \"kubernetes.io/projected/84aa45eb-6ad6-41af-91b9-b6b7b7a43790-kube-api-access-7vz4w\") pod \"horizon-549449d4bc-ffsq9\" (UID: \"84aa45eb-6ad6-41af-91b9-b6b7b7a43790\") " pod="openstack/horizon-549449d4bc-ffsq9"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.492797 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e178ffa-097e-4f43-a023-dcb3db4f2c6d-config-data\") pod \"keystone-bootstrap-lj9xf\" (UID: \"4e178ffa-097e-4f43-a023-dcb3db4f2c6d\") " pod="openstack/keystone-bootstrap-lj9xf"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.492825 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-289cb\" (UniqueName: \"kubernetes.io/projected/4e178ffa-097e-4f43-a023-dcb3db4f2c6d-kube-api-access-289cb\") pod \"keystone-bootstrap-lj9xf\" (UID: \"4e178ffa-097e-4f43-a023-dcb3db4f2c6d\") " pod="openstack/keystone-bootstrap-lj9xf"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.492851 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/84aa45eb-6ad6-41af-91b9-b6b7b7a43790-scripts\") pod \"horizon-549449d4bc-ffsq9\" (UID: \"84aa45eb-6ad6-41af-91b9-b6b7b7a43790\") " pod="openstack/horizon-549449d4bc-ffsq9"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.492869 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/84aa45eb-6ad6-41af-91b9-b6b7b7a43790-config-data\") pod \"horizon-549449d4bc-ffsq9\" (UID: \"84aa45eb-6ad6-41af-91b9-b6b7b7a43790\") " pod="openstack/horizon-549449d4bc-ffsq9"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.492901 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4e178ffa-097e-4f43-a023-dcb3db4f2c6d-scripts\") pod \"keystone-bootstrap-lj9xf\" (UID: \"4e178ffa-097e-4f43-a023-dcb3db4f2c6d\") " pod="openstack/keystone-bootstrap-lj9xf"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.492953 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e178ffa-097e-4f43-a023-dcb3db4f2c6d-combined-ca-bundle\") pod \"keystone-bootstrap-lj9xf\" (UID: \"4e178ffa-097e-4f43-a023-dcb3db4f2c6d\") " pod="openstack/keystone-bootstrap-lj9xf"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.551151 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-55fff446b9-wwcck"]
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.591502 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"]
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.596262 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.598976 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e178ffa-097e-4f43-a023-dcb3db4f2c6d-combined-ca-bundle\") pod \"keystone-bootstrap-lj9xf\" (UID: \"4e178ffa-097e-4f43-a023-dcb3db4f2c6d\") " pod="openstack/keystone-bootstrap-lj9xf"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.599032 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/84aa45eb-6ad6-41af-91b9-b6b7b7a43790-logs\") pod \"horizon-549449d4bc-ffsq9\" (UID: \"84aa45eb-6ad6-41af-91b9-b6b7b7a43790\") " pod="openstack/horizon-549449d4bc-ffsq9"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.599082 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/84aa45eb-6ad6-41af-91b9-b6b7b7a43790-horizon-secret-key\") pod \"horizon-549449d4bc-ffsq9\" (UID: \"84aa45eb-6ad6-41af-91b9-b6b7b7a43790\") " pod="openstack/horizon-549449d4bc-ffsq9"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.599102 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/4e178ffa-097e-4f43-a023-dcb3db4f2c6d-credential-keys\") pod \"keystone-bootstrap-lj9xf\" (UID: \"4e178ffa-097e-4f43-a023-dcb3db4f2c6d\") " pod="openstack/keystone-bootstrap-lj9xf"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.599121 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/4e178ffa-097e-4f43-a023-dcb3db4f2c6d-fernet-keys\") pod \"keystone-bootstrap-lj9xf\" (UID: \"4e178ffa-097e-4f43-a023-dcb3db4f2c6d\") " pod="openstack/keystone-bootstrap-lj9xf"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.599140 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7vz4w\" (UniqueName: \"kubernetes.io/projected/84aa45eb-6ad6-41af-91b9-b6b7b7a43790-kube-api-access-7vz4w\") pod \"horizon-549449d4bc-ffsq9\" (UID: \"84aa45eb-6ad6-41af-91b9-b6b7b7a43790\") " pod="openstack/horizon-549449d4bc-ffsq9"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.599189 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e178ffa-097e-4f43-a023-dcb3db4f2c6d-config-data\") pod \"keystone-bootstrap-lj9xf\" (UID: \"4e178ffa-097e-4f43-a023-dcb3db4f2c6d\") " pod="openstack/keystone-bootstrap-lj9xf"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.599206 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-289cb\" (UniqueName: \"kubernetes.io/projected/4e178ffa-097e-4f43-a023-dcb3db4f2c6d-kube-api-access-289cb\") pod \"keystone-bootstrap-lj9xf\" (UID: \"4e178ffa-097e-4f43-a023-dcb3db4f2c6d\") " pod="openstack/keystone-bootstrap-lj9xf"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.599245 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/84aa45eb-6ad6-41af-91b9-b6b7b7a43790-scripts\") pod \"horizon-549449d4bc-ffsq9\" (UID: \"84aa45eb-6ad6-41af-91b9-b6b7b7a43790\") " pod="openstack/horizon-549449d4bc-ffsq9"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.599263 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/84aa45eb-6ad6-41af-91b9-b6b7b7a43790-config-data\") pod \"horizon-549449d4bc-ffsq9\" (UID: \"84aa45eb-6ad6-41af-91b9-b6b7b7a43790\") " pod="openstack/horizon-549449d4bc-ffsq9"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.599283 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4e178ffa-097e-4f43-a023-dcb3db4f2c6d-scripts\") pod \"keystone-bootstrap-lj9xf\" (UID: \"4e178ffa-097e-4f43-a023-dcb3db4f2c6d\") " pod="openstack/keystone-bootstrap-lj9xf"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.600154 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/84aa45eb-6ad6-41af-91b9-b6b7b7a43790-logs\") pod \"horizon-549449d4bc-ffsq9\" (UID: \"84aa45eb-6ad6-41af-91b9-b6b7b7a43790\") " pod="openstack/horizon-549449d4bc-ffsq9"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.604069 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/4e178ffa-097e-4f43-a023-dcb3db4f2c6d-credential-keys\") pod \"keystone-bootstrap-lj9xf\" (UID: \"4e178ffa-097e-4f43-a023-dcb3db4f2c6d\") " pod="openstack/keystone-bootstrap-lj9xf"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.605057 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/84aa45eb-6ad6-41af-91b9-b6b7b7a43790-scripts\") pod \"horizon-549449d4bc-ffsq9\" (UID: \"84aa45eb-6ad6-41af-91b9-b6b7b7a43790\") " pod="openstack/horizon-549449d4bc-ffsq9"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.607313 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/84aa45eb-6ad6-41af-91b9-b6b7b7a43790-config-data\") pod \"horizon-549449d4bc-ffsq9\" (UID: \"84aa45eb-6ad6-41af-91b9-b6b7b7a43790\") " pod="openstack/horizon-549449d4bc-ffsq9"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.611886 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.617302 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.646442 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/4e178ffa-097e-4f43-a023-dcb3db4f2c6d-fernet-keys\") pod \"keystone-bootstrap-lj9xf\" (UID: \"4e178ffa-097e-4f43-a023-dcb3db4f2c6d\") " pod="openstack/keystone-bootstrap-lj9xf"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.646976 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/84aa45eb-6ad6-41af-91b9-b6b7b7a43790-horizon-secret-key\") pod \"horizon-549449d4bc-ffsq9\" (UID: \"84aa45eb-6ad6-41af-91b9-b6b7b7a43790\") " pod="openstack/horizon-549449d4bc-ffsq9"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.648742 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e178ffa-097e-4f43-a023-dcb3db4f2c6d-combined-ca-bundle\") pod \"keystone-bootstrap-lj9xf\" (UID: \"4e178ffa-097e-4f43-a023-dcb3db4f2c6d\") " pod="openstack/keystone-bootstrap-lj9xf"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.651030 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e178ffa-097e-4f43-a023-dcb3db4f2c6d-config-data\") pod \"keystone-bootstrap-lj9xf\" (UID: \"4e178ffa-097e-4f43-a023-dcb3db4f2c6d\") " pod="openstack/keystone-bootstrap-lj9xf"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.653569 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4e178ffa-097e-4f43-a023-dcb3db4f2c6d-scripts\") pod \"keystone-bootstrap-lj9xf\" (UID: \"4e178ffa-097e-4f43-a023-dcb3db4f2c6d\") " pod="openstack/keystone-bootstrap-lj9xf"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.658791 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7vz4w\" (UniqueName: \"kubernetes.io/projected/84aa45eb-6ad6-41af-91b9-b6b7b7a43790-kube-api-access-7vz4w\") pod \"horizon-549449d4bc-ffsq9\" (UID: \"84aa45eb-6ad6-41af-91b9-b6b7b7a43790\") " pod="openstack/horizon-549449d4bc-ffsq9"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.664727 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-76fcf4b695-9ckx2"]
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.669878 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.670007 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-76fcf4b695-9ckx2"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.689242 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-76fcf4b695-9ckx2"]
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.703810 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-f6j5h"]
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.704992 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-f6j5h"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.706097 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3413d18c-4d35-4962-bc3f-b6750860b13d-run-httpd\") pod \"ceilometer-0\" (UID: \"3413d18c-4d35-4962-bc3f-b6750860b13d\") " pod="openstack/ceilometer-0"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.706145 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3413d18c-4d35-4962-bc3f-b6750860b13d-log-httpd\") pod \"ceilometer-0\" (UID: \"3413d18c-4d35-4962-bc3f-b6750860b13d\") " pod="openstack/ceilometer-0"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.706171 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c53e054d-0317-469b-b94a-1d73bdfc5171-dns-swift-storage-0\") pod \"dnsmasq-dns-76fcf4b695-9ckx2\" (UID: \"c53e054d-0317-469b-b94a-1d73bdfc5171\") " pod="openstack/dnsmasq-dns-76fcf4b695-9ckx2"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.706189 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3413d18c-4d35-4962-bc3f-b6750860b13d-scripts\") pod \"ceilometer-0\" (UID: \"3413d18c-4d35-4962-bc3f-b6750860b13d\") " pod="openstack/ceilometer-0"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.706217 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3413d18c-4d35-4962-bc3f-b6750860b13d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"3413d18c-4d35-4962-bc3f-b6750860b13d\") " pod="openstack/ceilometer-0"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.706272 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c53e054d-0317-469b-b94a-1d73bdfc5171-dns-svc\") pod \"dnsmasq-dns-76fcf4b695-9ckx2\" (UID: \"c53e054d-0317-469b-b94a-1d73bdfc5171\") " pod="openstack/dnsmasq-dns-76fcf4b695-9ckx2"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.706297 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c53e054d-0317-469b-b94a-1d73bdfc5171-ovsdbserver-sb\") pod \"dnsmasq-dns-76fcf4b695-9ckx2\" (UID: \"c53e054d-0317-469b-b94a-1d73bdfc5171\") " pod="openstack/dnsmasq-dns-76fcf4b695-9ckx2"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.706314 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c53e054d-0317-469b-b94a-1d73bdfc5171-config\") pod \"dnsmasq-dns-76fcf4b695-9ckx2\" (UID: \"c53e054d-0317-469b-b94a-1d73bdfc5171\") " pod="openstack/dnsmasq-dns-76fcf4b695-9ckx2"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.706333 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3413d18c-4d35-4962-bc3f-b6750860b13d-config-data\") pod \"ceilometer-0\" (UID: \"3413d18c-4d35-4962-bc3f-b6750860b13d\") " pod="openstack/ceilometer-0"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.706349 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3413d18c-4d35-4962-bc3f-b6750860b13d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"3413d18c-4d35-4962-bc3f-b6750860b13d\") " pod="openstack/ceilometer-0"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.706377 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c53e054d-0317-469b-b94a-1d73bdfc5171-ovsdbserver-nb\") pod \"dnsmasq-dns-76fcf4b695-9ckx2\" (UID: \"c53e054d-0317-469b-b94a-1d73bdfc5171\") " pod="openstack/dnsmasq-dns-76fcf4b695-9ckx2"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.706392 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hxmmv\" (UniqueName: \"kubernetes.io/projected/c53e054d-0317-469b-b94a-1d73bdfc5171-kube-api-access-hxmmv\") pod \"dnsmasq-dns-76fcf4b695-9ckx2\" (UID: \"c53e054d-0317-469b-b94a-1d73bdfc5171\") " pod="openstack/dnsmasq-dns-76fcf4b695-9ckx2"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.706409 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2psg2\" (UniqueName: \"kubernetes.io/projected/3413d18c-4d35-4962-bc3f-b6750860b13d-kube-api-access-2psg2\") pod \"ceilometer-0\" (UID: \"3413d18c-4d35-4962-bc3f-b6750860b13d\") " pod="openstack/ceilometer-0"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.707952 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-tnpj6"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.708147 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.708416 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.729458 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-f6j5h"]
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.746998 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-289cb\" (UniqueName: \"kubernetes.io/projected/4e178ffa-097e-4f43-a023-dcb3db4f2c6d-kube-api-access-289cb\") pod \"keystone-bootstrap-lj9xf\" (UID: \"4e178ffa-097e-4f43-a023-dcb3db4f2c6d\") " pod="openstack/keystone-bootstrap-lj9xf"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.764151 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-6d4b7fc5ff-4blmt"]
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.765740 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-6d4b7fc5ff-4blmt"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.804624 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-6d4b7fc5ff-4blmt"]
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.809202 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3413d18c-4d35-4962-bc3f-b6750860b13d-run-httpd\") pod \"ceilometer-0\" (UID: \"3413d18c-4d35-4962-bc3f-b6750860b13d\") " pod="openstack/ceilometer-0"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.809278 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x5l5f\" (UniqueName: \"kubernetes.io/projected/7b148d26-3aac-44de-9776-c03b03c5fff2-kube-api-access-x5l5f\") pod \"placement-db-sync-f6j5h\" (UID: \"7b148d26-3aac-44de-9776-c03b03c5fff2\") " pod="openstack/placement-db-sync-f6j5h"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.809307 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3413d18c-4d35-4962-bc3f-b6750860b13d-log-httpd\") pod \"ceilometer-0\" (UID: \"3413d18c-4d35-4962-bc3f-b6750860b13d\") " pod="openstack/ceilometer-0"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.809328 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c53e054d-0317-469b-b94a-1d73bdfc5171-dns-swift-storage-0\") pod \"dnsmasq-dns-76fcf4b695-9ckx2\" (UID: \"c53e054d-0317-469b-b94a-1d73bdfc5171\") " pod="openstack/dnsmasq-dns-76fcf4b695-9ckx2"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.809349 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7b148d26-3aac-44de-9776-c03b03c5fff2-scripts\") pod \"placement-db-sync-f6j5h\" (UID: \"7b148d26-3aac-44de-9776-c03b03c5fff2\") " pod="openstack/placement-db-sync-f6j5h"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.809366 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3413d18c-4d35-4962-bc3f-b6750860b13d-scripts\") pod \"ceilometer-0\" (UID: \"3413d18c-4d35-4962-bc3f-b6750860b13d\") " pod="openstack/ceilometer-0"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.809386 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/6fffc9fe-810c-40f8-b484-40a8fc4ed3a6-horizon-secret-key\") pod \"horizon-6d4b7fc5ff-4blmt\" (UID: \"6fffc9fe-810c-40f8-b484-40a8fc4ed3a6\") " pod="openstack/horizon-6d4b7fc5ff-4blmt"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.809412 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3413d18c-4d35-4962-bc3f-b6750860b13d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"3413d18c-4d35-4962-bc3f-b6750860b13d\") " pod="openstack/ceilometer-0"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.809441 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b148d26-3aac-44de-9776-c03b03c5fff2-combined-ca-bundle\") pod \"placement-db-sync-f6j5h\" (UID: \"7b148d26-3aac-44de-9776-c03b03c5fff2\") " pod="openstack/placement-db-sync-f6j5h"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.809459 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c53e054d-0317-469b-b94a-1d73bdfc5171-dns-svc\") pod \"dnsmasq-dns-76fcf4b695-9ckx2\" (UID: \"c53e054d-0317-469b-b94a-1d73bdfc5171\") " pod="openstack/dnsmasq-dns-76fcf4b695-9ckx2"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.809482 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/6fffc9fe-810c-40f8-b484-40a8fc4ed3a6-config-data\") pod \"horizon-6d4b7fc5ff-4blmt\" (UID: \"6fffc9fe-810c-40f8-b484-40a8fc4ed3a6\") " pod="openstack/horizon-6d4b7fc5ff-4blmt"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.809503 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c53e054d-0317-469b-b94a-1d73bdfc5171-ovsdbserver-sb\") pod \"dnsmasq-dns-76fcf4b695-9ckx2\" (UID: \"c53e054d-0317-469b-b94a-1d73bdfc5171\") " pod="openstack/dnsmasq-dns-76fcf4b695-9ckx2"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.809521 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c53e054d-0317-469b-b94a-1d73bdfc5171-config\") pod \"dnsmasq-dns-76fcf4b695-9ckx2\" (UID: \"c53e054d-0317-469b-b94a-1d73bdfc5171\") " pod="openstack/dnsmasq-dns-76fcf4b695-9ckx2"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.809541 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3413d18c-4d35-4962-bc3f-b6750860b13d-config-data\") pod \"ceilometer-0\" (UID: \"3413d18c-4d35-4962-bc3f-b6750860b13d\") " pod="openstack/ceilometer-0"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.809557 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6fffc9fe-810c-40f8-b484-40a8fc4ed3a6-logs\") pod \"horizon-6d4b7fc5ff-4blmt\" (UID: \"6fffc9fe-810c-40f8-b484-40a8fc4ed3a6\") " pod="openstack/horizon-6d4b7fc5ff-4blmt"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.809572 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3413d18c-4d35-4962-bc3f-b6750860b13d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"3413d18c-4d35-4962-bc3f-b6750860b13d\") " pod="openstack/ceilometer-0"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.809596 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6fffc9fe-810c-40f8-b484-40a8fc4ed3a6-scripts\") pod \"horizon-6d4b7fc5ff-4blmt\" (UID: \"6fffc9fe-810c-40f8-b484-40a8fc4ed3a6\") " pod="openstack/horizon-6d4b7fc5ff-4blmt"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.809614 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7b148d26-3aac-44de-9776-c03b03c5fff2-logs\") pod \"placement-db-sync-f6j5h\" (UID: \"7b148d26-3aac-44de-9776-c03b03c5fff2\") " pod="openstack/placement-db-sync-f6j5h"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.809632 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c53e054d-0317-469b-b94a-1d73bdfc5171-ovsdbserver-nb\") pod \"dnsmasq-dns-76fcf4b695-9ckx2\" (UID: \"c53e054d-0317-469b-b94a-1d73bdfc5171\") " pod="openstack/dnsmasq-dns-76fcf4b695-9ckx2"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.809651 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hxmmv\" (UniqueName: \"kubernetes.io/projected/c53e054d-0317-469b-b94a-1d73bdfc5171-kube-api-access-hxmmv\") pod \"dnsmasq-dns-76fcf4b695-9ckx2\" (UID: \"c53e054d-0317-469b-b94a-1d73bdfc5171\") " pod="openstack/dnsmasq-dns-76fcf4b695-9ckx2"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.809668 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2psg2\" (UniqueName: \"kubernetes.io/projected/3413d18c-4d35-4962-bc3f-b6750860b13d-kube-api-access-2psg2\") pod \"ceilometer-0\" (UID: \"3413d18c-4d35-4962-bc3f-b6750860b13d\") " pod="openstack/ceilometer-0"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.809686 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7fr6d\" (UniqueName: \"kubernetes.io/projected/6fffc9fe-810c-40f8-b484-40a8fc4ed3a6-kube-api-access-7fr6d\") pod \"horizon-6d4b7fc5ff-4blmt\" (UID: \"6fffc9fe-810c-40f8-b484-40a8fc4ed3a6\") " pod="openstack/horizon-6d4b7fc5ff-4blmt"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.809709 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7b148d26-3aac-44de-9776-c03b03c5fff2-config-data\") pod \"placement-db-sync-f6j5h\" (UID: \"7b148d26-3aac-44de-9776-c03b03c5fff2\") " pod="openstack/placement-db-sync-f6j5h"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.813664 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c53e054d-0317-469b-b94a-1d73bdfc5171-dns-swift-storage-0\") pod \"dnsmasq-dns-76fcf4b695-9ckx2\" (UID: \"c53e054d-0317-469b-b94a-1d73bdfc5171\") " pod="openstack/dnsmasq-dns-76fcf4b695-9ckx2"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.814054 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3413d18c-4d35-4962-bc3f-b6750860b13d-log-httpd\") pod \"ceilometer-0\" (UID: \"3413d18c-4d35-4962-bc3f-b6750860b13d\") " pod="openstack/ceilometer-0"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.814656 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3413d18c-4d35-4962-bc3f-b6750860b13d-run-httpd\") pod \"ceilometer-0\" (UID: \"3413d18c-4d35-4962-bc3f-b6750860b13d\") " pod="openstack/ceilometer-0"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.815396 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c53e054d-0317-469b-b94a-1d73bdfc5171-dns-svc\") pod \"dnsmasq-dns-76fcf4b695-9ckx2\" (UID: \"c53e054d-0317-469b-b94a-1d73bdfc5171\") " pod="openstack/dnsmasq-dns-76fcf4b695-9ckx2"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.817301 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c53e054d-0317-469b-b94a-1d73bdfc5171-ovsdbserver-nb\") pod \"dnsmasq-dns-76fcf4b695-9ckx2\" (UID: \"c53e054d-0317-469b-b94a-1d73bdfc5171\") " pod="openstack/dnsmasq-dns-76fcf4b695-9ckx2"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.818013 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c53e054d-0317-469b-b94a-1d73bdfc5171-ovsdbserver-sb\") pod \"dnsmasq-dns-76fcf4b695-9ckx2\" (UID: \"c53e054d-0317-469b-b94a-1d73bdfc5171\") " pod="openstack/dnsmasq-dns-76fcf4b695-9ckx2"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.824030 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c53e054d-0317-469b-b94a-1d73bdfc5171-config\") pod \"dnsmasq-dns-76fcf4b695-9ckx2\" (UID: \"c53e054d-0317-469b-b94a-1d73bdfc5171\") " pod="openstack/dnsmasq-dns-76fcf4b695-9ckx2"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.825147 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3413d18c-4d35-4962-bc3f-b6750860b13d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"3413d18c-4d35-4962-bc3f-b6750860b13d\") " pod="openstack/ceilometer-0"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.832787 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3413d18c-4d35-4962-bc3f-b6750860b13d-config-data\") pod \"ceilometer-0\" (UID: \"3413d18c-4d35-4962-bc3f-b6750860b13d\") " pod="openstack/ceilometer-0"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.837505 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3413d18c-4d35-4962-bc3f-b6750860b13d-scripts\") pod \"ceilometer-0\" (UID: \"3413d18c-4d35-4962-bc3f-b6750860b13d\") " pod="openstack/ceilometer-0"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.843771 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3413d18c-4d35-4962-bc3f-b6750860b13d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"3413d18c-4d35-4962-bc3f-b6750860b13d\") " pod="openstack/ceilometer-0"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.852453 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hxmmv\" (UniqueName: \"kubernetes.io/projected/c53e054d-0317-469b-b94a-1d73bdfc5171-kube-api-access-hxmmv\") pod \"dnsmasq-dns-76fcf4b695-9ckx2\" (UID: \"c53e054d-0317-469b-b94a-1d73bdfc5171\") " pod="openstack/dnsmasq-dns-76fcf4b695-9ckx2"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.857070 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-549449d4bc-ffsq9"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.916334 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x5l5f\" (UniqueName: \"kubernetes.io/projected/7b148d26-3aac-44de-9776-c03b03c5fff2-kube-api-access-x5l5f\") pod \"placement-db-sync-f6j5h\" (UID: \"7b148d26-3aac-44de-9776-c03b03c5fff2\") " pod="openstack/placement-db-sync-f6j5h"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.916388 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7b148d26-3aac-44de-9776-c03b03c5fff2-scripts\") pod \"placement-db-sync-f6j5h\" (UID: \"7b148d26-3aac-44de-9776-c03b03c5fff2\") " pod="openstack/placement-db-sync-f6j5h"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.916411 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/6fffc9fe-810c-40f8-b484-40a8fc4ed3a6-horizon-secret-key\") pod \"horizon-6d4b7fc5ff-4blmt\" (UID: \"6fffc9fe-810c-40f8-b484-40a8fc4ed3a6\") " pod="openstack/horizon-6d4b7fc5ff-4blmt"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.916443 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b148d26-3aac-44de-9776-c03b03c5fff2-combined-ca-bundle\") pod \"placement-db-sync-f6j5h\" (UID: \"7b148d26-3aac-44de-9776-c03b03c5fff2\") " pod="openstack/placement-db-sync-f6j5h"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.916470 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/6fffc9fe-810c-40f8-b484-40a8fc4ed3a6-config-data\") pod \"horizon-6d4b7fc5ff-4blmt\" (UID: \"6fffc9fe-810c-40f8-b484-40a8fc4ed3a6\") " pod="openstack/horizon-6d4b7fc5ff-4blmt"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.916499 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6fffc9fe-810c-40f8-b484-40a8fc4ed3a6-logs\") pod \"horizon-6d4b7fc5ff-4blmt\" (UID: \"6fffc9fe-810c-40f8-b484-40a8fc4ed3a6\") " pod="openstack/horizon-6d4b7fc5ff-4blmt"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.916523 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6fffc9fe-810c-40f8-b484-40a8fc4ed3a6-scripts\") pod \"horizon-6d4b7fc5ff-4blmt\" (UID: \"6fffc9fe-810c-40f8-b484-40a8fc4ed3a6\") " pod="openstack/horizon-6d4b7fc5ff-4blmt"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.916541 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7b148d26-3aac-44de-9776-c03b03c5fff2-logs\") pod \"placement-db-sync-f6j5h\" (UID: \"7b148d26-3aac-44de-9776-c03b03c5fff2\") " pod="openstack/placement-db-sync-f6j5h"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.916569 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7fr6d\" (UniqueName: \"kubernetes.io/projected/6fffc9fe-810c-40f8-b484-40a8fc4ed3a6-kube-api-access-7fr6d\") pod \"horizon-6d4b7fc5ff-4blmt\" (UID: \"6fffc9fe-810c-40f8-b484-40a8fc4ed3a6\") " pod="openstack/horizon-6d4b7fc5ff-4blmt"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.916593 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7b148d26-3aac-44de-9776-c03b03c5fff2-config-data\") pod \"placement-db-sync-f6j5h\" (UID: \"7b148d26-3aac-44de-9776-c03b03c5fff2\") " pod="openstack/placement-db-sync-f6j5h"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.919267 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2psg2\" (UniqueName: \"kubernetes.io/projected/3413d18c-4d35-4962-bc3f-b6750860b13d-kube-api-access-2psg2\") pod \"ceilometer-0\" (UID: \"3413d18c-4d35-4962-bc3f-b6750860b13d\") " pod="openstack/ceilometer-0"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.919989 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6fffc9fe-810c-40f8-b484-40a8fc4ed3a6-scripts\") pod \"horizon-6d4b7fc5ff-4blmt\" (UID: \"6fffc9fe-810c-40f8-b484-40a8fc4ed3a6\") " pod="openstack/horizon-6d4b7fc5ff-4blmt"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.920634 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6fffc9fe-810c-40f8-b484-40a8fc4ed3a6-logs\") pod \"horizon-6d4b7fc5ff-4blmt\" (UID: \"6fffc9fe-810c-40f8-b484-40a8fc4ed3a6\") " pod="openstack/horizon-6d4b7fc5ff-4blmt"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.920870 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7b148d26-3aac-44de-9776-c03b03c5fff2-logs\") pod \"placement-db-sync-f6j5h\" (UID: \"7b148d26-3aac-44de-9776-c03b03c5fff2\") " pod="openstack/placement-db-sync-f6j5h"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.921208 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/6fffc9fe-810c-40f8-b484-40a8fc4ed3a6-config-data\") pod \"horizon-6d4b7fc5ff-4blmt\" (UID: \"6fffc9fe-810c-40f8-b484-40a8fc4ed3a6\") " pod="openstack/horizon-6d4b7fc5ff-4blmt"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.947204 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/6fffc9fe-810c-40f8-b484-40a8fc4ed3a6-horizon-secret-key\") pod \"horizon-6d4b7fc5ff-4blmt\" (UID: \"6fffc9fe-810c-40f8-b484-40a8fc4ed3a6\") " pod="openstack/horizon-6d4b7fc5ff-4blmt"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.951577 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-ksvcg"]
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.952252 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b148d26-3aac-44de-9776-c03b03c5fff2-combined-ca-bundle\") pod \"placement-db-sync-f6j5h\" (UID: \"7b148d26-3aac-44de-9776-c03b03c5fff2\") " pod="openstack/placement-db-sync-f6j5h"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.952977 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-ksvcg"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.956929 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.957101 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.958362 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-zg4gc"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.959730 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7fr6d\" (UniqueName: \"kubernetes.io/projected/6fffc9fe-810c-40f8-b484-40a8fc4ed3a6-kube-api-access-7fr6d\") pod \"horizon-6d4b7fc5ff-4blmt\" (UID: \"6fffc9fe-810c-40f8-b484-40a8fc4ed3a6\") " pod="openstack/horizon-6d4b7fc5ff-4blmt"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.961565 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7b148d26-3aac-44de-9776-c03b03c5fff2-scripts\") pod \"placement-db-sync-f6j5h\" (UID: \"7b148d26-3aac-44de-9776-c03b03c5fff2\") " pod="openstack/placement-db-sync-f6j5h"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.963560 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7b148d26-3aac-44de-9776-c03b03c5fff2-config-data\") pod \"placement-db-sync-f6j5h\" (UID: \"7b148d26-3aac-44de-9776-c03b03c5fff2\") " pod="openstack/placement-db-sync-f6j5h"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.963990 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x5l5f\" (UniqueName: \"kubernetes.io/projected/7b148d26-3aac-44de-9776-c03b03c5fff2-kube-api-access-x5l5f\") pod \"placement-db-sync-f6j5h\" (UID: \"7b148d26-3aac-44de-9776-c03b03c5fff2\") " pod="openstack/placement-db-sync-f6j5h"
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.988285 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-ksvcg"]
Oct 03 13:50:19 crc kubenswrapper[4861]: I1003 13:50:19.998778 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-lj9xf" Oct 03 13:50:20 crc kubenswrapper[4861]: I1003 13:50:20.020820 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/456069ef-db45-4878-85d5-1e5001fa789e-etc-machine-id\") pod \"cinder-db-sync-ksvcg\" (UID: \"456069ef-db45-4878-85d5-1e5001fa789e\") " pod="openstack/cinder-db-sync-ksvcg" Oct 03 13:50:20 crc kubenswrapper[4861]: I1003 13:50:20.020892 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/456069ef-db45-4878-85d5-1e5001fa789e-combined-ca-bundle\") pod \"cinder-db-sync-ksvcg\" (UID: \"456069ef-db45-4878-85d5-1e5001fa789e\") " pod="openstack/cinder-db-sync-ksvcg" Oct 03 13:50:20 crc kubenswrapper[4861]: I1003 13:50:20.020967 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/456069ef-db45-4878-85d5-1e5001fa789e-config-data\") pod \"cinder-db-sync-ksvcg\" (UID: \"456069ef-db45-4878-85d5-1e5001fa789e\") " pod="openstack/cinder-db-sync-ksvcg" Oct 03 13:50:20 crc kubenswrapper[4861]: I1003 13:50:20.020985 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/456069ef-db45-4878-85d5-1e5001fa789e-scripts\") pod \"cinder-db-sync-ksvcg\" (UID: \"456069ef-db45-4878-85d5-1e5001fa789e\") " pod="openstack/cinder-db-sync-ksvcg" Oct 03 13:50:20 crc kubenswrapper[4861]: I1003 13:50:20.021008 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/456069ef-db45-4878-85d5-1e5001fa789e-db-sync-config-data\") pod \"cinder-db-sync-ksvcg\" (UID: \"456069ef-db45-4878-85d5-1e5001fa789e\") " pod="openstack/cinder-db-sync-ksvcg" Oct 03 13:50:20 crc kubenswrapper[4861]: I1003 13:50:20.021031 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fh644\" (UniqueName: \"kubernetes.io/projected/456069ef-db45-4878-85d5-1e5001fa789e-kube-api-access-fh644\") pod \"cinder-db-sync-ksvcg\" (UID: \"456069ef-db45-4878-85d5-1e5001fa789e\") " pod="openstack/cinder-db-sync-ksvcg" Oct 03 13:50:20 crc kubenswrapper[4861]: I1003 13:50:20.038785 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 03 13:50:20 crc kubenswrapper[4861]: I1003 13:50:20.071336 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-76fcf4b695-9ckx2" Oct 03 13:50:20 crc kubenswrapper[4861]: I1003 13:50:20.133610 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-f6j5h" Oct 03 13:50:20 crc kubenswrapper[4861]: I1003 13:50:20.134552 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fh644\" (UniqueName: \"kubernetes.io/projected/456069ef-db45-4878-85d5-1e5001fa789e-kube-api-access-fh644\") pod \"cinder-db-sync-ksvcg\" (UID: \"456069ef-db45-4878-85d5-1e5001fa789e\") " pod="openstack/cinder-db-sync-ksvcg" Oct 03 13:50:20 crc kubenswrapper[4861]: I1003 13:50:20.134600 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/456069ef-db45-4878-85d5-1e5001fa789e-etc-machine-id\") pod \"cinder-db-sync-ksvcg\" (UID: \"456069ef-db45-4878-85d5-1e5001fa789e\") " pod="openstack/cinder-db-sync-ksvcg" Oct 03 13:50:20 crc kubenswrapper[4861]: I1003 13:50:20.134646 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/456069ef-db45-4878-85d5-1e5001fa789e-combined-ca-bundle\") pod \"cinder-db-sync-ksvcg\" (UID: \"456069ef-db45-4878-85d5-1e5001fa789e\") " pod="openstack/cinder-db-sync-ksvcg" Oct 03 13:50:20 crc kubenswrapper[4861]: I1003 13:50:20.134706 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/456069ef-db45-4878-85d5-1e5001fa789e-config-data\") pod \"cinder-db-sync-ksvcg\" (UID: \"456069ef-db45-4878-85d5-1e5001fa789e\") " pod="openstack/cinder-db-sync-ksvcg" Oct 03 13:50:20 crc kubenswrapper[4861]: I1003 13:50:20.134722 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/456069ef-db45-4878-85d5-1e5001fa789e-scripts\") pod \"cinder-db-sync-ksvcg\" (UID: \"456069ef-db45-4878-85d5-1e5001fa789e\") " pod="openstack/cinder-db-sync-ksvcg" Oct 03 13:50:20 crc kubenswrapper[4861]: I1003 13:50:20.134750 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/456069ef-db45-4878-85d5-1e5001fa789e-db-sync-config-data\") pod \"cinder-db-sync-ksvcg\" (UID: \"456069ef-db45-4878-85d5-1e5001fa789e\") " pod="openstack/cinder-db-sync-ksvcg" Oct 03 13:50:20 crc kubenswrapper[4861]: I1003 13:50:20.141922 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/456069ef-db45-4878-85d5-1e5001fa789e-etc-machine-id\") pod \"cinder-db-sync-ksvcg\" (UID: \"456069ef-db45-4878-85d5-1e5001fa789e\") " pod="openstack/cinder-db-sync-ksvcg" Oct 03 13:50:20 crc kubenswrapper[4861]: I1003 13:50:20.144119 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-cgf9r"] Oct 03 13:50:20 crc kubenswrapper[4861]: I1003 13:50:20.145128 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-cgf9r" Oct 03 13:50:20 crc kubenswrapper[4861]: I1003 13:50:20.151832 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/456069ef-db45-4878-85d5-1e5001fa789e-db-sync-config-data\") pod \"cinder-db-sync-ksvcg\" (UID: \"456069ef-db45-4878-85d5-1e5001fa789e\") " pod="openstack/cinder-db-sync-ksvcg" Oct 03 13:50:20 crc kubenswrapper[4861]: I1003 13:50:20.154717 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/456069ef-db45-4878-85d5-1e5001fa789e-scripts\") pod \"cinder-db-sync-ksvcg\" (UID: \"456069ef-db45-4878-85d5-1e5001fa789e\") " pod="openstack/cinder-db-sync-ksvcg" Oct 03 13:50:20 crc kubenswrapper[4861]: I1003 13:50:20.155525 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/456069ef-db45-4878-85d5-1e5001fa789e-config-data\") pod \"cinder-db-sync-ksvcg\" (UID: \"456069ef-db45-4878-85d5-1e5001fa789e\") " pod="openstack/cinder-db-sync-ksvcg" Oct 03 13:50:20 crc kubenswrapper[4861]: I1003 13:50:20.157484 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-6d4b7fc5ff-4blmt" Oct 03 13:50:20 crc kubenswrapper[4861]: I1003 13:50:20.158722 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Oct 03 13:50:20 crc kubenswrapper[4861]: I1003 13:50:20.158853 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Oct 03 13:50:20 crc kubenswrapper[4861]: I1003 13:50:20.158735 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-kl4jt" Oct 03 13:50:20 crc kubenswrapper[4861]: I1003 13:50:20.161860 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/456069ef-db45-4878-85d5-1e5001fa789e-combined-ca-bundle\") pod \"cinder-db-sync-ksvcg\" (UID: \"456069ef-db45-4878-85d5-1e5001fa789e\") " pod="openstack/cinder-db-sync-ksvcg" Oct 03 13:50:20 crc kubenswrapper[4861]: I1003 13:50:20.197091 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-cgf9r"] Oct 03 13:50:20 crc kubenswrapper[4861]: I1003 13:50:20.216803 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fh644\" (UniqueName: \"kubernetes.io/projected/456069ef-db45-4878-85d5-1e5001fa789e-kube-api-access-fh644\") pod \"cinder-db-sync-ksvcg\" (UID: \"456069ef-db45-4878-85d5-1e5001fa789e\") " pod="openstack/cinder-db-sync-ksvcg" Oct 03 13:50:20 crc kubenswrapper[4861]: I1003 13:50:20.218725 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-h4wcn"] Oct 03 13:50:20 crc kubenswrapper[4861]: I1003 13:50:20.236342 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-h4wcn" Oct 03 13:50:20 crc kubenswrapper[4861]: I1003 13:50:20.236345 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b2xpr\" (UniqueName: \"kubernetes.io/projected/3b0f621f-bdf0-4768-a764-0bc15e01faba-kube-api-access-b2xpr\") pod \"barbican-db-sync-h4wcn\" (UID: \"3b0f621f-bdf0-4768-a764-0bc15e01faba\") " pod="openstack/barbican-db-sync-h4wcn" Oct 03 13:50:20 crc kubenswrapper[4861]: I1003 13:50:20.236637 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/a7415066-e954-4d19-9167-1a795f87add7-config\") pod \"neutron-db-sync-cgf9r\" (UID: \"a7415066-e954-4d19-9167-1a795f87add7\") " pod="openstack/neutron-db-sync-cgf9r" Oct 03 13:50:20 crc kubenswrapper[4861]: I1003 13:50:20.236678 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b0f621f-bdf0-4768-a764-0bc15e01faba-combined-ca-bundle\") pod \"barbican-db-sync-h4wcn\" (UID: \"3b0f621f-bdf0-4768-a764-0bc15e01faba\") " pod="openstack/barbican-db-sync-h4wcn" Oct 03 13:50:20 crc kubenswrapper[4861]: I1003 13:50:20.236728 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7415066-e954-4d19-9167-1a795f87add7-combined-ca-bundle\") pod \"neutron-db-sync-cgf9r\" (UID: \"a7415066-e954-4d19-9167-1a795f87add7\") " pod="openstack/neutron-db-sync-cgf9r" Oct 03 13:50:20 crc kubenswrapper[4861]: I1003 13:50:20.236792 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/3b0f621f-bdf0-4768-a764-0bc15e01faba-db-sync-config-data\") pod \"barbican-db-sync-h4wcn\" (UID: \"3b0f621f-bdf0-4768-a764-0bc15e01faba\") " pod="openstack/barbican-db-sync-h4wcn" Oct 03 13:50:20 crc kubenswrapper[4861]: I1003 13:50:20.236822 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4qhrl\" (UniqueName: \"kubernetes.io/projected/a7415066-e954-4d19-9167-1a795f87add7-kube-api-access-4qhrl\") pod \"neutron-db-sync-cgf9r\" (UID: \"a7415066-e954-4d19-9167-1a795f87add7\") " pod="openstack/neutron-db-sync-cgf9r" Oct 03 13:50:20 crc kubenswrapper[4861]: I1003 13:50:20.251691 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-rdm8r" Oct 03 13:50:20 crc kubenswrapper[4861]: I1003 13:50:20.251937 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Oct 03 13:50:20 crc kubenswrapper[4861]: I1003 13:50:20.275103 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-h4wcn"] Oct 03 13:50:20 crc kubenswrapper[4861]: I1003 13:50:20.288963 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-ksvcg" Oct 03 13:50:20 crc kubenswrapper[4861]: I1003 13:50:20.340718 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/3b0f621f-bdf0-4768-a764-0bc15e01faba-db-sync-config-data\") pod \"barbican-db-sync-h4wcn\" (UID: \"3b0f621f-bdf0-4768-a764-0bc15e01faba\") " pod="openstack/barbican-db-sync-h4wcn" Oct 03 13:50:20 crc kubenswrapper[4861]: I1003 13:50:20.340755 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4qhrl\" (UniqueName: \"kubernetes.io/projected/a7415066-e954-4d19-9167-1a795f87add7-kube-api-access-4qhrl\") pod \"neutron-db-sync-cgf9r\" (UID: \"a7415066-e954-4d19-9167-1a795f87add7\") " pod="openstack/neutron-db-sync-cgf9r" Oct 03 13:50:20 crc kubenswrapper[4861]: I1003 13:50:20.340800 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b2xpr\" (UniqueName: \"kubernetes.io/projected/3b0f621f-bdf0-4768-a764-0bc15e01faba-kube-api-access-b2xpr\") pod \"barbican-db-sync-h4wcn\" (UID: \"3b0f621f-bdf0-4768-a764-0bc15e01faba\") " pod="openstack/barbican-db-sync-h4wcn" Oct 03 13:50:20 crc kubenswrapper[4861]: I1003 13:50:20.340813 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/a7415066-e954-4d19-9167-1a795f87add7-config\") pod \"neutron-db-sync-cgf9r\" (UID: \"a7415066-e954-4d19-9167-1a795f87add7\") " pod="openstack/neutron-db-sync-cgf9r" Oct 03 13:50:20 crc kubenswrapper[4861]: I1003 13:50:20.340843 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b0f621f-bdf0-4768-a764-0bc15e01faba-combined-ca-bundle\") pod \"barbican-db-sync-h4wcn\" (UID: \"3b0f621f-bdf0-4768-a764-0bc15e01faba\") " pod="openstack/barbican-db-sync-h4wcn" Oct 03 13:50:20 crc kubenswrapper[4861]: I1003 13:50:20.340876 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7415066-e954-4d19-9167-1a795f87add7-combined-ca-bundle\") pod \"neutron-db-sync-cgf9r\" (UID: \"a7415066-e954-4d19-9167-1a795f87add7\") " pod="openstack/neutron-db-sync-cgf9r" Oct 03 13:50:20 crc kubenswrapper[4861]: I1003 13:50:20.352450 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7415066-e954-4d19-9167-1a795f87add7-combined-ca-bundle\") pod \"neutron-db-sync-cgf9r\" (UID: \"a7415066-e954-4d19-9167-1a795f87add7\") " pod="openstack/neutron-db-sync-cgf9r" Oct 03 13:50:20 crc kubenswrapper[4861]: I1003 13:50:20.356503 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/a7415066-e954-4d19-9167-1a795f87add7-config\") pod \"neutron-db-sync-cgf9r\" (UID: \"a7415066-e954-4d19-9167-1a795f87add7\") " pod="openstack/neutron-db-sync-cgf9r" Oct 03 13:50:20 crc kubenswrapper[4861]: I1003 13:50:20.360804 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b0f621f-bdf0-4768-a764-0bc15e01faba-combined-ca-bundle\") pod \"barbican-db-sync-h4wcn\" (UID: \"3b0f621f-bdf0-4768-a764-0bc15e01faba\") " pod="openstack/barbican-db-sync-h4wcn" Oct 03 13:50:20 crc kubenswrapper[4861]: I1003 13:50:20.378826 4861 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/3b0f621f-bdf0-4768-a764-0bc15e01faba-db-sync-config-data\") pod \"barbican-db-sync-h4wcn\" (UID: \"3b0f621f-bdf0-4768-a764-0bc15e01faba\") " pod="openstack/barbican-db-sync-h4wcn" Oct 03 13:50:20 crc kubenswrapper[4861]: I1003 13:50:20.399899 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4qhrl\" (UniqueName: \"kubernetes.io/projected/a7415066-e954-4d19-9167-1a795f87add7-kube-api-access-4qhrl\") pod \"neutron-db-sync-cgf9r\" (UID: \"a7415066-e954-4d19-9167-1a795f87add7\") " pod="openstack/neutron-db-sync-cgf9r" Oct 03 13:50:20 crc kubenswrapper[4861]: I1003 13:50:20.400795 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b2xpr\" (UniqueName: \"kubernetes.io/projected/3b0f621f-bdf0-4768-a764-0bc15e01faba-kube-api-access-b2xpr\") pod \"barbican-db-sync-h4wcn\" (UID: \"3b0f621f-bdf0-4768-a764-0bc15e01faba\") " pod="openstack/barbican-db-sync-h4wcn" Oct 03 13:50:20 crc kubenswrapper[4861]: I1003 13:50:20.487595 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-55fff446b9-wwcck"] Oct 03 13:50:20 crc kubenswrapper[4861]: I1003 13:50:20.632606 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-549449d4bc-ffsq9"] Oct 03 13:50:20 crc kubenswrapper[4861]: I1003 13:50:20.632964 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-cgf9r" Oct 03 13:50:20 crc kubenswrapper[4861]: I1003 13:50:20.673510 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-h4wcn" Oct 03 13:50:20 crc kubenswrapper[4861]: I1003 13:50:20.872570 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55fff446b9-wwcck" event={"ID":"204dc908-11a2-48d9-980f-c9a9fdc66079","Type":"ContainerStarted","Data":"dd6c15a583f5af8a9ce675356ef65191c0ab154945cc80c029f05ce7cc70c622"} Oct 03 13:50:20 crc kubenswrapper[4861]: I1003 13:50:20.874755 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-549449d4bc-ffsq9" event={"ID":"84aa45eb-6ad6-41af-91b9-b6b7b7a43790","Type":"ContainerStarted","Data":"f9864f56d97c8f286c6c4da517d06b8fb796168347fac0a378c780c3a20ac506"} Oct 03 13:50:21 crc kubenswrapper[4861]: I1003 13:50:21.039550 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-lj9xf"] Oct 03 13:50:21 crc kubenswrapper[4861]: I1003 13:50:21.273686 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 03 13:50:21 crc kubenswrapper[4861]: I1003 13:50:21.289612 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-ksvcg"] Oct 03 13:50:21 crc kubenswrapper[4861]: W1003 13:50:21.334593 4861 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod456069ef_db45_4878_85d5_1e5001fa789e.slice/crio-aeffa5a6f229c56885e9ed92ac1d4d72a4de1091e1d9a0e53f57b0e6da40df22 WatchSource:0}: Error finding container aeffa5a6f229c56885e9ed92ac1d4d72a4de1091e1d9a0e53f57b0e6da40df22: Status 404 returned error can't find the container with id aeffa5a6f229c56885e9ed92ac1d4d72a4de1091e1d9a0e53f57b0e6da40df22 Oct 03 13:50:21 crc kubenswrapper[4861]: I1003 13:50:21.434027 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-6d4b7fc5ff-4blmt"] Oct 03 13:50:21 crc kubenswrapper[4861]: I1003 
13:50:21.480894 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-76fcf4b695-9ckx2"] Oct 03 13:50:21 crc kubenswrapper[4861]: W1003 13:50:21.496991 4861 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc53e054d_0317_469b_b94a_1d73bdfc5171.slice/crio-fc4ca55692b5e34690334ff4ab47a039dd5f7c6e8675268e7ae098f9af32092d WatchSource:0}: Error finding container fc4ca55692b5e34690334ff4ab47a039dd5f7c6e8675268e7ae098f9af32092d: Status 404 returned error can't find the container with id fc4ca55692b5e34690334ff4ab47a039dd5f7c6e8675268e7ae098f9af32092d Oct 03 13:50:21 crc kubenswrapper[4861]: I1003 13:50:21.678486 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-cgf9r"] Oct 03 13:50:21 crc kubenswrapper[4861]: W1003 13:50:21.685806 4861 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda7415066_e954_4d19_9167_1a795f87add7.slice/crio-04e50fa17210993866ed1f818fa477e85c4c2ad2038c6447f6d1c9a94cc2a4ba WatchSource:0}: Error finding container 04e50fa17210993866ed1f818fa477e85c4c2ad2038c6447f6d1c9a94cc2a4ba: Status 404 returned error can't find the container with id 04e50fa17210993866ed1f818fa477e85c4c2ad2038c6447f6d1c9a94cc2a4ba Oct 03 13:50:21 crc kubenswrapper[4861]: I1003 13:50:21.696141 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-f6j5h"] Oct 03 13:50:21 crc kubenswrapper[4861]: I1003 13:50:21.709001 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-h4wcn"] Oct 03 13:50:21 crc kubenswrapper[4861]: I1003 13:50:21.892759 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-lj9xf" event={"ID":"4e178ffa-097e-4f43-a023-dcb3db4f2c6d","Type":"ContainerStarted","Data":"932053692d9d01408cca63828f1bcc734486fa7c9accdebe01347dd82fbc2416"} Oct 03 13:50:21 crc kubenswrapper[4861]: I1003 13:50:21.892817 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-lj9xf" event={"ID":"4e178ffa-097e-4f43-a023-dcb3db4f2c6d","Type":"ContainerStarted","Data":"e58cf0ec46b9125090740e20879ad9fde1deae800cbb5c471ce7c6e1961116e9"} Oct 03 13:50:21 crc kubenswrapper[4861]: I1003 13:50:21.894419 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-ksvcg" event={"ID":"456069ef-db45-4878-85d5-1e5001fa789e","Type":"ContainerStarted","Data":"aeffa5a6f229c56885e9ed92ac1d4d72a4de1091e1d9a0e53f57b0e6da40df22"} Oct 03 13:50:21 crc kubenswrapper[4861]: I1003 13:50:21.922039 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-cgf9r" event={"ID":"a7415066-e954-4d19-9167-1a795f87add7","Type":"ContainerStarted","Data":"04e50fa17210993866ed1f818fa477e85c4c2ad2038c6447f6d1c9a94cc2a4ba"} Oct 03 13:50:21 crc kubenswrapper[4861]: I1003 13:50:21.931326 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-lj9xf" podStartSLOduration=2.931310605 podStartE2EDuration="2.931310605s" podCreationTimestamp="2025-10-03 13:50:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:50:21.925817745 +0000 UTC m=+1135.923802792" watchObservedRunningTime="2025-10-03 13:50:21.931310605 +0000 UTC m=+1135.929295652" Oct 03 13:50:21 crc kubenswrapper[4861]: I1003 13:50:21.932789 4861 
generic.go:334] "Generic (PLEG): container finished" podID="204dc908-11a2-48d9-980f-c9a9fdc66079" containerID="2de331104b03b4cf5daf9f703c5845678296a97a3c908d7a839935115cb38b03" exitCode=0 Oct 03 13:50:21 crc kubenswrapper[4861]: I1003 13:50:21.932864 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55fff446b9-wwcck" event={"ID":"204dc908-11a2-48d9-980f-c9a9fdc66079","Type":"ContainerDied","Data":"2de331104b03b4cf5daf9f703c5845678296a97a3c908d7a839935115cb38b03"} Oct 03 13:50:21 crc kubenswrapper[4861]: I1003 13:50:21.941574 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3413d18c-4d35-4962-bc3f-b6750860b13d","Type":"ContainerStarted","Data":"f68b032e7f1f7f171b639a9e37a9dcddb60cf162b4329e550892ea411c45e574"} Oct 03 13:50:21 crc kubenswrapper[4861]: I1003 13:50:21.958696 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-h4wcn" event={"ID":"3b0f621f-bdf0-4768-a764-0bc15e01faba","Type":"ContainerStarted","Data":"6766699593e8f28d23efbc3694de8806e938f184a2e191f22dc80e2685b4da50"} Oct 03 13:50:21 crc kubenswrapper[4861]: I1003 13:50:21.975987 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-f6j5h" event={"ID":"7b148d26-3aac-44de-9776-c03b03c5fff2","Type":"ContainerStarted","Data":"c23f0bf64579fdf0da4abf0b38f3e665570769188eeba510391e64c898ce4aff"} Oct 03 13:50:21 crc kubenswrapper[4861]: I1003 13:50:21.991384 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6d4b7fc5ff-4blmt" event={"ID":"6fffc9fe-810c-40f8-b484-40a8fc4ed3a6","Type":"ContainerStarted","Data":"e9d6c48819bc96edde4b9eaed26be773d623e5089bd28d59682a01180ea8b0f1"} Oct 03 13:50:22 crc kubenswrapper[4861]: I1003 13:50:22.002014 4861 generic.go:334] "Generic (PLEG): container finished" podID="c53e054d-0317-469b-b94a-1d73bdfc5171" containerID="ea4ebdabcb7ddc81838062d39c51d82ce92031e139e7e6c84bd37400106f46dc" exitCode=0 Oct 03 13:50:22 crc kubenswrapper[4861]: I1003 13:50:22.002075 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-76fcf4b695-9ckx2" event={"ID":"c53e054d-0317-469b-b94a-1d73bdfc5171","Type":"ContainerDied","Data":"ea4ebdabcb7ddc81838062d39c51d82ce92031e139e7e6c84bd37400106f46dc"} Oct 03 13:50:22 crc kubenswrapper[4861]: I1003 13:50:22.002099 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-76fcf4b695-9ckx2" event={"ID":"c53e054d-0317-469b-b94a-1d73bdfc5171","Type":"ContainerStarted","Data":"fc4ca55692b5e34690334ff4ab47a039dd5f7c6e8675268e7ae098f9af32092d"} Oct 03 13:50:22 crc kubenswrapper[4861]: I1003 13:50:22.062666 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-549449d4bc-ffsq9"] Oct 03 13:50:22 crc kubenswrapper[4861]: I1003 13:50:22.117438 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-6865665b8c-nhrvv"] Oct 03 13:50:22 crc kubenswrapper[4861]: I1003 13:50:22.118960 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-6865665b8c-nhrvv" Oct 03 13:50:22 crc kubenswrapper[4861]: I1003 13:50:22.139659 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 03 13:50:22 crc kubenswrapper[4861]: I1003 13:50:22.208285 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kkxgs\" (UniqueName: \"kubernetes.io/projected/5c7b3e0b-c320-4a02-8140-932d108fa189-kube-api-access-kkxgs\") pod \"horizon-6865665b8c-nhrvv\" (UID: \"5c7b3e0b-c320-4a02-8140-932d108fa189\") " pod="openstack/horizon-6865665b8c-nhrvv" Oct 03 13:50:22 crc kubenswrapper[4861]: I1003 13:50:22.208602 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/5c7b3e0b-c320-4a02-8140-932d108fa189-horizon-secret-key\") pod \"horizon-6865665b8c-nhrvv\" (UID: \"5c7b3e0b-c320-4a02-8140-932d108fa189\") " pod="openstack/horizon-6865665b8c-nhrvv" Oct 03 13:50:22 crc kubenswrapper[4861]: I1003 13:50:22.208785 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5c7b3e0b-c320-4a02-8140-932d108fa189-config-data\") pod \"horizon-6865665b8c-nhrvv\" (UID: \"5c7b3e0b-c320-4a02-8140-932d108fa189\") " pod="openstack/horizon-6865665b8c-nhrvv" Oct 03 13:50:22 crc kubenswrapper[4861]: I1003 13:50:22.208899 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5c7b3e0b-c320-4a02-8140-932d108fa189-logs\") pod \"horizon-6865665b8c-nhrvv\" (UID: \"5c7b3e0b-c320-4a02-8140-932d108fa189\") " pod="openstack/horizon-6865665b8c-nhrvv" Oct 03 13:50:22 crc kubenswrapper[4861]: I1003 13:50:22.209002 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5c7b3e0b-c320-4a02-8140-932d108fa189-scripts\") pod \"horizon-6865665b8c-nhrvv\" (UID: \"5c7b3e0b-c320-4a02-8140-932d108fa189\") " pod="openstack/horizon-6865665b8c-nhrvv" Oct 03 13:50:22 crc kubenswrapper[4861]: I1003 13:50:22.226959 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-6865665b8c-nhrvv"] Oct 03 13:50:22 crc kubenswrapper[4861]: I1003 13:50:22.310601 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kkxgs\" (UniqueName: \"kubernetes.io/projected/5c7b3e0b-c320-4a02-8140-932d108fa189-kube-api-access-kkxgs\") pod \"horizon-6865665b8c-nhrvv\" (UID: \"5c7b3e0b-c320-4a02-8140-932d108fa189\") " pod="openstack/horizon-6865665b8c-nhrvv" Oct 03 13:50:22 crc kubenswrapper[4861]: I1003 13:50:22.310885 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/5c7b3e0b-c320-4a02-8140-932d108fa189-horizon-secret-key\") pod \"horizon-6865665b8c-nhrvv\" (UID: \"5c7b3e0b-c320-4a02-8140-932d108fa189\") " pod="openstack/horizon-6865665b8c-nhrvv" Oct 03 13:50:22 crc kubenswrapper[4861]: I1003 13:50:22.310933 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5c7b3e0b-c320-4a02-8140-932d108fa189-config-data\") pod \"horizon-6865665b8c-nhrvv\" (UID: \"5c7b3e0b-c320-4a02-8140-932d108fa189\") " pod="openstack/horizon-6865665b8c-nhrvv" Oct 03 13:50:22 crc kubenswrapper[4861]: 
I1003 13:50:22.310962 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5c7b3e0b-c320-4a02-8140-932d108fa189-logs\") pod \"horizon-6865665b8c-nhrvv\" (UID: \"5c7b3e0b-c320-4a02-8140-932d108fa189\") " pod="openstack/horizon-6865665b8c-nhrvv" Oct 03 13:50:22 crc kubenswrapper[4861]: I1003 13:50:22.310984 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5c7b3e0b-c320-4a02-8140-932d108fa189-scripts\") pod \"horizon-6865665b8c-nhrvv\" (UID: \"5c7b3e0b-c320-4a02-8140-932d108fa189\") " pod="openstack/horizon-6865665b8c-nhrvv" Oct 03 13:50:22 crc kubenswrapper[4861]: I1003 13:50:22.316512 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5c7b3e0b-c320-4a02-8140-932d108fa189-scripts\") pod \"horizon-6865665b8c-nhrvv\" (UID: \"5c7b3e0b-c320-4a02-8140-932d108fa189\") " pod="openstack/horizon-6865665b8c-nhrvv" Oct 03 13:50:22 crc kubenswrapper[4861]: I1003 13:50:22.316986 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5c7b3e0b-c320-4a02-8140-932d108fa189-logs\") pod \"horizon-6865665b8c-nhrvv\" (UID: \"5c7b3e0b-c320-4a02-8140-932d108fa189\") " pod="openstack/horizon-6865665b8c-nhrvv" Oct 03 13:50:22 crc kubenswrapper[4861]: I1003 13:50:22.317443 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5c7b3e0b-c320-4a02-8140-932d108fa189-config-data\") pod \"horizon-6865665b8c-nhrvv\" (UID: \"5c7b3e0b-c320-4a02-8140-932d108fa189\") " pod="openstack/horizon-6865665b8c-nhrvv" Oct 03 13:50:22 crc kubenswrapper[4861]: I1003 13:50:22.320263 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/5c7b3e0b-c320-4a02-8140-932d108fa189-horizon-secret-key\") pod \"horizon-6865665b8c-nhrvv\" (UID: \"5c7b3e0b-c320-4a02-8140-932d108fa189\") " pod="openstack/horizon-6865665b8c-nhrvv" Oct 03 13:50:22 crc kubenswrapper[4861]: I1003 13:50:22.341515 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kkxgs\" (UniqueName: \"kubernetes.io/projected/5c7b3e0b-c320-4a02-8140-932d108fa189-kube-api-access-kkxgs\") pod \"horizon-6865665b8c-nhrvv\" (UID: \"5c7b3e0b-c320-4a02-8140-932d108fa189\") " pod="openstack/horizon-6865665b8c-nhrvv" Oct 03 13:50:22 crc kubenswrapper[4861]: I1003 13:50:22.472400 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-6865665b8c-nhrvv" Oct 03 13:50:22 crc kubenswrapper[4861]: I1003 13:50:22.530631 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-55fff446b9-wwcck" Oct 03 13:50:22 crc kubenswrapper[4861]: I1003 13:50:22.625772 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/204dc908-11a2-48d9-980f-c9a9fdc66079-config\") pod \"204dc908-11a2-48d9-980f-c9a9fdc66079\" (UID: \"204dc908-11a2-48d9-980f-c9a9fdc66079\") " Oct 03 13:50:22 crc kubenswrapper[4861]: I1003 13:50:22.625895 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lbl4c\" (UniqueName: \"kubernetes.io/projected/204dc908-11a2-48d9-980f-c9a9fdc66079-kube-api-access-lbl4c\") pod \"204dc908-11a2-48d9-980f-c9a9fdc66079\" (UID: \"204dc908-11a2-48d9-980f-c9a9fdc66079\") " Oct 03 13:50:22 crc kubenswrapper[4861]: I1003 13:50:22.625980 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/204dc908-11a2-48d9-980f-c9a9fdc66079-ovsdbserver-sb\") pod \"204dc908-11a2-48d9-980f-c9a9fdc66079\" (UID: \"204dc908-11a2-48d9-980f-c9a9fdc66079\") " Oct 03 13:50:22 crc kubenswrapper[4861]: I1003 13:50:22.626023 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/204dc908-11a2-48d9-980f-c9a9fdc66079-dns-swift-storage-0\") pod \"204dc908-11a2-48d9-980f-c9a9fdc66079\" (UID: \"204dc908-11a2-48d9-980f-c9a9fdc66079\") " Oct 03 13:50:22 crc kubenswrapper[4861]: I1003 13:50:22.626103 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/204dc908-11a2-48d9-980f-c9a9fdc66079-ovsdbserver-nb\") pod \"204dc908-11a2-48d9-980f-c9a9fdc66079\" (UID: \"204dc908-11a2-48d9-980f-c9a9fdc66079\") " Oct 03 13:50:22 crc kubenswrapper[4861]: I1003 13:50:22.627134 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/204dc908-11a2-48d9-980f-c9a9fdc66079-dns-svc\") pod \"204dc908-11a2-48d9-980f-c9a9fdc66079\" (UID: \"204dc908-11a2-48d9-980f-c9a9fdc66079\") " Oct 03 13:50:22 crc kubenswrapper[4861]: I1003 13:50:22.631465 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/204dc908-11a2-48d9-980f-c9a9fdc66079-kube-api-access-lbl4c" (OuterVolumeSpecName: "kube-api-access-lbl4c") pod "204dc908-11a2-48d9-980f-c9a9fdc66079" (UID: "204dc908-11a2-48d9-980f-c9a9fdc66079"). InnerVolumeSpecName "kube-api-access-lbl4c". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:50:22 crc kubenswrapper[4861]: I1003 13:50:22.655033 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/204dc908-11a2-48d9-980f-c9a9fdc66079-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "204dc908-11a2-48d9-980f-c9a9fdc66079" (UID: "204dc908-11a2-48d9-980f-c9a9fdc66079"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:50:22 crc kubenswrapper[4861]: I1003 13:50:22.655140 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/204dc908-11a2-48d9-980f-c9a9fdc66079-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "204dc908-11a2-48d9-980f-c9a9fdc66079" (UID: "204dc908-11a2-48d9-980f-c9a9fdc66079"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:50:22 crc kubenswrapper[4861]: I1003 13:50:22.673030 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/204dc908-11a2-48d9-980f-c9a9fdc66079-config" (OuterVolumeSpecName: "config") pod "204dc908-11a2-48d9-980f-c9a9fdc66079" (UID: "204dc908-11a2-48d9-980f-c9a9fdc66079"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:50:22 crc kubenswrapper[4861]: I1003 13:50:22.675481 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/204dc908-11a2-48d9-980f-c9a9fdc66079-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "204dc908-11a2-48d9-980f-c9a9fdc66079" (UID: "204dc908-11a2-48d9-980f-c9a9fdc66079"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:50:22 crc kubenswrapper[4861]: I1003 13:50:22.682806 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/204dc908-11a2-48d9-980f-c9a9fdc66079-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "204dc908-11a2-48d9-980f-c9a9fdc66079" (UID: "204dc908-11a2-48d9-980f-c9a9fdc66079"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:50:22 crc kubenswrapper[4861]: I1003 13:50:22.740900 4861 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/204dc908-11a2-48d9-980f-c9a9fdc66079-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 03 13:50:22 crc kubenswrapper[4861]: I1003 13:50:22.740920 4861 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/204dc908-11a2-48d9-980f-c9a9fdc66079-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 03 13:50:22 crc kubenswrapper[4861]: I1003 13:50:22.740930 4861 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/204dc908-11a2-48d9-980f-c9a9fdc66079-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 03 13:50:22 crc kubenswrapper[4861]: I1003 13:50:22.740939 4861 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/204dc908-11a2-48d9-980f-c9a9fdc66079-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 03 13:50:22 crc kubenswrapper[4861]: I1003 13:50:22.740949 4861 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/204dc908-11a2-48d9-980f-c9a9fdc66079-config\") on node \"crc\" DevicePath \"\"" Oct 03 13:50:22 crc kubenswrapper[4861]: I1003 13:50:22.740958 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lbl4c\" (UniqueName: \"kubernetes.io/projected/204dc908-11a2-48d9-980f-c9a9fdc66079-kube-api-access-lbl4c\") on node \"crc\" DevicePath \"\"" Oct 03 13:50:23 crc kubenswrapper[4861]: I1003 13:50:23.012204 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55fff446b9-wwcck" event={"ID":"204dc908-11a2-48d9-980f-c9a9fdc66079","Type":"ContainerDied","Data":"dd6c15a583f5af8a9ce675356ef65191c0ab154945cc80c029f05ce7cc70c622"} Oct 03 13:50:23 crc kubenswrapper[4861]: I1003 13:50:23.012270 4861 scope.go:117] "RemoveContainer" containerID="2de331104b03b4cf5daf9f703c5845678296a97a3c908d7a839935115cb38b03" Oct 03 13:50:23 crc kubenswrapper[4861]: I1003 13:50:23.012331 4861 util.go:48] "No ready sandbox for 
pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-55fff446b9-wwcck" Oct 03 13:50:23 crc kubenswrapper[4861]: I1003 13:50:23.020724 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-76fcf4b695-9ckx2" event={"ID":"c53e054d-0317-469b-b94a-1d73bdfc5171","Type":"ContainerStarted","Data":"5ba27c3db11bc950c81edbd68ecb2b8c39b5e401fba11e8b94a9974eac5ead94"} Oct 03 13:50:23 crc kubenswrapper[4861]: I1003 13:50:23.021094 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-76fcf4b695-9ckx2" Oct 03 13:50:23 crc kubenswrapper[4861]: I1003 13:50:23.023961 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-cgf9r" event={"ID":"a7415066-e954-4d19-9167-1a795f87add7","Type":"ContainerStarted","Data":"0c38c0749c2fee8296c898b93d4f827db7e259dfb4be403af990d096498787d6"} Oct 03 13:50:23 crc kubenswrapper[4861]: I1003 13:50:23.087963 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-55fff446b9-wwcck"] Oct 03 13:50:23 crc kubenswrapper[4861]: I1003 13:50:23.110473 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-55fff446b9-wwcck"] Oct 03 13:50:23 crc kubenswrapper[4861]: I1003 13:50:23.114510 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-76fcf4b695-9ckx2" podStartSLOduration=4.11449004 podStartE2EDuration="4.11449004s" podCreationTimestamp="2025-10-03 13:50:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:50:23.104490945 +0000 UTC m=+1137.102476002" watchObservedRunningTime="2025-10-03 13:50:23.11449004 +0000 UTC m=+1137.112475087" Oct 03 13:50:23 crc kubenswrapper[4861]: I1003 13:50:23.177734 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-cgf9r" podStartSLOduration=3.177716151 podStartE2EDuration="3.177716151s" podCreationTimestamp="2025-10-03 13:50:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:50:23.143411207 +0000 UTC m=+1137.141396254" watchObservedRunningTime="2025-10-03 13:50:23.177716151 +0000 UTC m=+1137.175701198" Oct 03 13:50:23 crc kubenswrapper[4861]: I1003 13:50:23.191765 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-6865665b8c-nhrvv"] Oct 03 13:50:24 crc kubenswrapper[4861]: I1003 13:50:24.050613 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6865665b8c-nhrvv" event={"ID":"5c7b3e0b-c320-4a02-8140-932d108fa189","Type":"ContainerStarted","Data":"f70fd01c4558779adb6ba8a35e3cacc7e17ce11506a8e699268bc8e3f0fd1d30"} Oct 03 13:50:24 crc kubenswrapper[4861]: I1003 13:50:24.702304 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="204dc908-11a2-48d9-980f-c9a9fdc66079" path="/var/lib/kubelet/pods/204dc908-11a2-48d9-980f-c9a9fdc66079/volumes" Oct 03 13:50:27 crc kubenswrapper[4861]: I1003 13:50:27.098028 4861 generic.go:334] "Generic (PLEG): container finished" podID="4e178ffa-097e-4f43-a023-dcb3db4f2c6d" containerID="932053692d9d01408cca63828f1bcc734486fa7c9accdebe01347dd82fbc2416" exitCode=0 Oct 03 13:50:27 crc kubenswrapper[4861]: I1003 13:50:27.098500 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-lj9xf" 
event={"ID":"4e178ffa-097e-4f43-a023-dcb3db4f2c6d","Type":"ContainerDied","Data":"932053692d9d01408cca63828f1bcc734486fa7c9accdebe01347dd82fbc2416"} Oct 03 13:50:28 crc kubenswrapper[4861]: I1003 13:50:28.191929 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-6d4b7fc5ff-4blmt"] Oct 03 13:50:28 crc kubenswrapper[4861]: I1003 13:50:28.246299 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-84cdb7b9dd-jhc2h"] Oct 03 13:50:28 crc kubenswrapper[4861]: E1003 13:50:28.246602 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="204dc908-11a2-48d9-980f-c9a9fdc66079" containerName="init" Oct 03 13:50:28 crc kubenswrapper[4861]: I1003 13:50:28.246614 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="204dc908-11a2-48d9-980f-c9a9fdc66079" containerName="init" Oct 03 13:50:28 crc kubenswrapper[4861]: I1003 13:50:28.246783 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="204dc908-11a2-48d9-980f-c9a9fdc66079" containerName="init" Oct 03 13:50:28 crc kubenswrapper[4861]: I1003 13:50:28.259876 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-84cdb7b9dd-jhc2h" Oct 03 13:50:28 crc kubenswrapper[4861]: I1003 13:50:28.263124 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-84cdb7b9dd-jhc2h"] Oct 03 13:50:28 crc kubenswrapper[4861]: I1003 13:50:28.263699 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-horizon-svc" Oct 03 13:50:28 crc kubenswrapper[4861]: I1003 13:50:28.309007 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-6865665b8c-nhrvv"] Oct 03 13:50:28 crc kubenswrapper[4861]: I1003 13:50:28.375436 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-6c8cb9d9fb-bt6ls"] Oct 03 13:50:28 crc kubenswrapper[4861]: I1003 13:50:28.377658 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-6c8cb9d9fb-bt6ls"
Oct 03 13:50:28 crc kubenswrapper[4861]: I1003 13:50:28.411970 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/c589e11a-4953-46ec-aeff-a83f6557421f-horizon-secret-key\") pod \"horizon-84cdb7b9dd-jhc2h\" (UID: \"c589e11a-4953-46ec-aeff-a83f6557421f\") " pod="openstack/horizon-84cdb7b9dd-jhc2h"
Oct 03 13:50:28 crc kubenswrapper[4861]: I1003 13:50:28.412056 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c589e11a-4953-46ec-aeff-a83f6557421f-combined-ca-bundle\") pod \"horizon-84cdb7b9dd-jhc2h\" (UID: \"c589e11a-4953-46ec-aeff-a83f6557421f\") " pod="openstack/horizon-84cdb7b9dd-jhc2h"
Oct 03 13:50:28 crc kubenswrapper[4861]: I1003 13:50:28.412101 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c589e11a-4953-46ec-aeff-a83f6557421f-scripts\") pod \"horizon-84cdb7b9dd-jhc2h\" (UID: \"c589e11a-4953-46ec-aeff-a83f6557421f\") " pod="openstack/horizon-84cdb7b9dd-jhc2h"
Oct 03 13:50:28 crc kubenswrapper[4861]: I1003 13:50:28.412124 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/c589e11a-4953-46ec-aeff-a83f6557421f-horizon-tls-certs\") pod \"horizon-84cdb7b9dd-jhc2h\" (UID: \"c589e11a-4953-46ec-aeff-a83f6557421f\") " pod="openstack/horizon-84cdb7b9dd-jhc2h"
Oct 03 13:50:28 crc kubenswrapper[4861]: I1003 13:50:28.412938 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c589e11a-4953-46ec-aeff-a83f6557421f-config-data\") pod \"horizon-84cdb7b9dd-jhc2h\" (UID: \"c589e11a-4953-46ec-aeff-a83f6557421f\") " pod="openstack/horizon-84cdb7b9dd-jhc2h"
Oct 03 13:50:28 crc kubenswrapper[4861]: I1003 13:50:28.413064 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2czpt\" (UniqueName: \"kubernetes.io/projected/c589e11a-4953-46ec-aeff-a83f6557421f-kube-api-access-2czpt\") pod \"horizon-84cdb7b9dd-jhc2h\" (UID: \"c589e11a-4953-46ec-aeff-a83f6557421f\") " pod="openstack/horizon-84cdb7b9dd-jhc2h"
Oct 03 13:50:28 crc kubenswrapper[4861]: I1003 13:50:28.413577 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c589e11a-4953-46ec-aeff-a83f6557421f-logs\") pod \"horizon-84cdb7b9dd-jhc2h\" (UID: \"c589e11a-4953-46ec-aeff-a83f6557421f\") " pod="openstack/horizon-84cdb7b9dd-jhc2h"
Oct 03 13:50:28 crc kubenswrapper[4861]: I1003 13:50:28.415359 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-6c8cb9d9fb-bt6ls"]
Oct 03 13:50:28 crc kubenswrapper[4861]: I1003 13:50:28.516195 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wxnpm\" (UniqueName: \"kubernetes.io/projected/81ec621b-cc30-4ab2-ae0e-bdd71629009f-kube-api-access-wxnpm\") pod \"horizon-6c8cb9d9fb-bt6ls\" (UID: \"81ec621b-cc30-4ab2-ae0e-bdd71629009f\") " pod="openstack/horizon-6c8cb9d9fb-bt6ls"
Oct 03 13:50:28 crc kubenswrapper[4861]: I1003 13:50:28.516602 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/81ec621b-cc30-4ab2-ae0e-bdd71629009f-scripts\") pod \"horizon-6c8cb9d9fb-bt6ls\" (UID: \"81ec621b-cc30-4ab2-ae0e-bdd71629009f\") " pod="openstack/horizon-6c8cb9d9fb-bt6ls"
Oct 03 13:50:28 crc kubenswrapper[4861]: I1003 13:50:28.516629 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/c589e11a-4953-46ec-aeff-a83f6557421f-horizon-secret-key\") pod \"horizon-84cdb7b9dd-jhc2h\" (UID: \"c589e11a-4953-46ec-aeff-a83f6557421f\") " pod="openstack/horizon-84cdb7b9dd-jhc2h"
Oct 03 13:50:28 crc kubenswrapper[4861]: I1003 13:50:28.516643 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/81ec621b-cc30-4ab2-ae0e-bdd71629009f-horizon-secret-key\") pod \"horizon-6c8cb9d9fb-bt6ls\" (UID: \"81ec621b-cc30-4ab2-ae0e-bdd71629009f\") " pod="openstack/horizon-6c8cb9d9fb-bt6ls"
Oct 03 13:50:28 crc kubenswrapper[4861]: I1003 13:50:28.516672 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/81ec621b-cc30-4ab2-ae0e-bdd71629009f-combined-ca-bundle\") pod \"horizon-6c8cb9d9fb-bt6ls\" (UID: \"81ec621b-cc30-4ab2-ae0e-bdd71629009f\") " pod="openstack/horizon-6c8cb9d9fb-bt6ls"
Oct 03 13:50:28 crc kubenswrapper[4861]: I1003 13:50:28.516686 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/81ec621b-cc30-4ab2-ae0e-bdd71629009f-horizon-tls-certs\") pod \"horizon-6c8cb9d9fb-bt6ls\" (UID: \"81ec621b-cc30-4ab2-ae0e-bdd71629009f\") " pod="openstack/horizon-6c8cb9d9fb-bt6ls"
Oct 03 13:50:28 crc kubenswrapper[4861]: I1003 13:50:28.516704 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c589e11a-4953-46ec-aeff-a83f6557421f-combined-ca-bundle\") pod \"horizon-84cdb7b9dd-jhc2h\" (UID: \"c589e11a-4953-46ec-aeff-a83f6557421f\") " pod="openstack/horizon-84cdb7b9dd-jhc2h"
Oct 03 13:50:28 crc kubenswrapper[4861]: I1003 13:50:28.516730 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c589e11a-4953-46ec-aeff-a83f6557421f-scripts\") pod \"horizon-84cdb7b9dd-jhc2h\" (UID: \"c589e11a-4953-46ec-aeff-a83f6557421f\") " pod="openstack/horizon-84cdb7b9dd-jhc2h"
Oct 03 13:50:28 crc kubenswrapper[4861]: I1003 13:50:28.516745 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/c589e11a-4953-46ec-aeff-a83f6557421f-horizon-tls-certs\") pod \"horizon-84cdb7b9dd-jhc2h\" (UID: \"c589e11a-4953-46ec-aeff-a83f6557421f\") " pod="openstack/horizon-84cdb7b9dd-jhc2h"
Oct 03 13:50:28 crc kubenswrapper[4861]: I1003 13:50:28.516775 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c589e11a-4953-46ec-aeff-a83f6557421f-config-data\") pod \"horizon-84cdb7b9dd-jhc2h\" (UID: \"c589e11a-4953-46ec-aeff-a83f6557421f\") " pod="openstack/horizon-84cdb7b9dd-jhc2h"
Oct 03 13:50:28 crc kubenswrapper[4861]: I1003 13:50:28.516808 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2czpt\" (UniqueName: \"kubernetes.io/projected/c589e11a-4953-46ec-aeff-a83f6557421f-kube-api-access-2czpt\") pod \"horizon-84cdb7b9dd-jhc2h\" (UID: \"c589e11a-4953-46ec-aeff-a83f6557421f\") " pod="openstack/horizon-84cdb7b9dd-jhc2h"
Oct 03 13:50:28 crc kubenswrapper[4861]: I1003 13:50:28.516840 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/81ec621b-cc30-4ab2-ae0e-bdd71629009f-config-data\") pod \"horizon-6c8cb9d9fb-bt6ls\" (UID: \"81ec621b-cc30-4ab2-ae0e-bdd71629009f\") " pod="openstack/horizon-6c8cb9d9fb-bt6ls"
Oct 03 13:50:28 crc kubenswrapper[4861]: I1003 13:50:28.516866 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c589e11a-4953-46ec-aeff-a83f6557421f-logs\") pod \"horizon-84cdb7b9dd-jhc2h\" (UID: \"c589e11a-4953-46ec-aeff-a83f6557421f\") " pod="openstack/horizon-84cdb7b9dd-jhc2h"
Oct 03 13:50:28 crc kubenswrapper[4861]: I1003 13:50:28.516880 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/81ec621b-cc30-4ab2-ae0e-bdd71629009f-logs\") pod \"horizon-6c8cb9d9fb-bt6ls\" (UID: \"81ec621b-cc30-4ab2-ae0e-bdd71629009f\") " pod="openstack/horizon-6c8cb9d9fb-bt6ls"
Oct 03 13:50:28 crc kubenswrapper[4861]: I1003 13:50:28.519955 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c589e11a-4953-46ec-aeff-a83f6557421f-scripts\") pod \"horizon-84cdb7b9dd-jhc2h\" (UID: \"c589e11a-4953-46ec-aeff-a83f6557421f\") " pod="openstack/horizon-84cdb7b9dd-jhc2h"
Oct 03 13:50:28 crc kubenswrapper[4861]: I1003 13:50:28.528805 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c589e11a-4953-46ec-aeff-a83f6557421f-config-data\") pod \"horizon-84cdb7b9dd-jhc2h\" (UID: \"c589e11a-4953-46ec-aeff-a83f6557421f\") " pod="openstack/horizon-84cdb7b9dd-jhc2h"
Oct 03 13:50:28 crc kubenswrapper[4861]: I1003 13:50:28.529102 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c589e11a-4953-46ec-aeff-a83f6557421f-logs\") pod \"horizon-84cdb7b9dd-jhc2h\" (UID: \"c589e11a-4953-46ec-aeff-a83f6557421f\") " pod="openstack/horizon-84cdb7b9dd-jhc2h"
Oct 03 13:50:28 crc kubenswrapper[4861]: I1003 13:50:28.543223 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c589e11a-4953-46ec-aeff-a83f6557421f-combined-ca-bundle\") pod \"horizon-84cdb7b9dd-jhc2h\" (UID: \"c589e11a-4953-46ec-aeff-a83f6557421f\") " pod="openstack/horizon-84cdb7b9dd-jhc2h"
Oct 03 13:50:28 crc kubenswrapper[4861]: I1003 13:50:28.543574 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/c589e11a-4953-46ec-aeff-a83f6557421f-horizon-secret-key\") pod \"horizon-84cdb7b9dd-jhc2h\" (UID: \"c589e11a-4953-46ec-aeff-a83f6557421f\") " pod="openstack/horizon-84cdb7b9dd-jhc2h"
Oct 03 13:50:28 crc kubenswrapper[4861]: I1003 13:50:28.545328 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/c589e11a-4953-46ec-aeff-a83f6557421f-horizon-tls-certs\") pod \"horizon-84cdb7b9dd-jhc2h\" (UID: \"c589e11a-4953-46ec-aeff-a83f6557421f\") " pod="openstack/horizon-84cdb7b9dd-jhc2h"
Oct 03 13:50:28 crc kubenswrapper[4861]: I1003 13:50:28.545423 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2czpt\" (UniqueName: \"kubernetes.io/projected/c589e11a-4953-46ec-aeff-a83f6557421f-kube-api-access-2czpt\") pod \"horizon-84cdb7b9dd-jhc2h\" (UID: \"c589e11a-4953-46ec-aeff-a83f6557421f\") " pod="openstack/horizon-84cdb7b9dd-jhc2h"
Oct 03 13:50:28 crc kubenswrapper[4861]: I1003 13:50:28.592989 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-84cdb7b9dd-jhc2h"
Oct 03 13:50:28 crc kubenswrapper[4861]: I1003 13:50:28.618022 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wxnpm\" (UniqueName: \"kubernetes.io/projected/81ec621b-cc30-4ab2-ae0e-bdd71629009f-kube-api-access-wxnpm\") pod \"horizon-6c8cb9d9fb-bt6ls\" (UID: \"81ec621b-cc30-4ab2-ae0e-bdd71629009f\") " pod="openstack/horizon-6c8cb9d9fb-bt6ls"
Oct 03 13:50:28 crc kubenswrapper[4861]: I1003 13:50:28.618115 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/81ec621b-cc30-4ab2-ae0e-bdd71629009f-scripts\") pod \"horizon-6c8cb9d9fb-bt6ls\" (UID: \"81ec621b-cc30-4ab2-ae0e-bdd71629009f\") " pod="openstack/horizon-6c8cb9d9fb-bt6ls"
Oct 03 13:50:28 crc kubenswrapper[4861]: I1003 13:50:28.618140 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/81ec621b-cc30-4ab2-ae0e-bdd71629009f-horizon-secret-key\") pod \"horizon-6c8cb9d9fb-bt6ls\" (UID: \"81ec621b-cc30-4ab2-ae0e-bdd71629009f\") " pod="openstack/horizon-6c8cb9d9fb-bt6ls"
Oct 03 13:50:28 crc kubenswrapper[4861]: I1003 13:50:28.618168 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/81ec621b-cc30-4ab2-ae0e-bdd71629009f-combined-ca-bundle\") pod \"horizon-6c8cb9d9fb-bt6ls\" (UID: \"81ec621b-cc30-4ab2-ae0e-bdd71629009f\") " pod="openstack/horizon-6c8cb9d9fb-bt6ls"
Oct 03 13:50:28 crc kubenswrapper[4861]: I1003 13:50:28.618185 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/81ec621b-cc30-4ab2-ae0e-bdd71629009f-horizon-tls-certs\") pod \"horizon-6c8cb9d9fb-bt6ls\" (UID: \"81ec621b-cc30-4ab2-ae0e-bdd71629009f\") " pod="openstack/horizon-6c8cb9d9fb-bt6ls"
Oct 03 13:50:28 crc kubenswrapper[4861]: I1003 13:50:28.618640 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/81ec621b-cc30-4ab2-ae0e-bdd71629009f-config-data\") pod \"horizon-6c8cb9d9fb-bt6ls\" (UID: \"81ec621b-cc30-4ab2-ae0e-bdd71629009f\") " pod="openstack/horizon-6c8cb9d9fb-bt6ls"
Oct 03 13:50:28 crc kubenswrapper[4861]: I1003 13:50:28.618680 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/81ec621b-cc30-4ab2-ae0e-bdd71629009f-logs\") pod \"horizon-6c8cb9d9fb-bt6ls\" (UID: \"81ec621b-cc30-4ab2-ae0e-bdd71629009f\") " pod="openstack/horizon-6c8cb9d9fb-bt6ls"
Oct 03 13:50:28 crc kubenswrapper[4861]: I1003 13:50:28.619053 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/81ec621b-cc30-4ab2-ae0e-bdd71629009f-logs\") pod \"horizon-6c8cb9d9fb-bt6ls\" (UID: \"81ec621b-cc30-4ab2-ae0e-bdd71629009f\") " pod="openstack/horizon-6c8cb9d9fb-bt6ls"
Oct 03 13:50:28 crc kubenswrapper[4861]: I1003 13:50:28.619741 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/81ec621b-cc30-4ab2-ae0e-bdd71629009f-scripts\") pod \"horizon-6c8cb9d9fb-bt6ls\" (UID: \"81ec621b-cc30-4ab2-ae0e-bdd71629009f\") " pod="openstack/horizon-6c8cb9d9fb-bt6ls"
Oct 03 13:50:28 crc kubenswrapper[4861]: I1003 13:50:28.620116 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/81ec621b-cc30-4ab2-ae0e-bdd71629009f-config-data\") pod \"horizon-6c8cb9d9fb-bt6ls\" (UID: \"81ec621b-cc30-4ab2-ae0e-bdd71629009f\") " pod="openstack/horizon-6c8cb9d9fb-bt6ls"
Oct 03 13:50:28 crc kubenswrapper[4861]: I1003 13:50:28.622351 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/81ec621b-cc30-4ab2-ae0e-bdd71629009f-horizon-secret-key\") pod \"horizon-6c8cb9d9fb-bt6ls\" (UID: \"81ec621b-cc30-4ab2-ae0e-bdd71629009f\") " pod="openstack/horizon-6c8cb9d9fb-bt6ls"
Oct 03 13:50:28 crc kubenswrapper[4861]: I1003 13:50:28.625126 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/81ec621b-cc30-4ab2-ae0e-bdd71629009f-horizon-tls-certs\") pod \"horizon-6c8cb9d9fb-bt6ls\" (UID: \"81ec621b-cc30-4ab2-ae0e-bdd71629009f\") " pod="openstack/horizon-6c8cb9d9fb-bt6ls"
Oct 03 13:50:28 crc kubenswrapper[4861]: I1003 13:50:28.635122 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/81ec621b-cc30-4ab2-ae0e-bdd71629009f-combined-ca-bundle\") pod \"horizon-6c8cb9d9fb-bt6ls\" (UID: \"81ec621b-cc30-4ab2-ae0e-bdd71629009f\") " pod="openstack/horizon-6c8cb9d9fb-bt6ls"
Oct 03 13:50:28 crc kubenswrapper[4861]: I1003 13:50:28.637143 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wxnpm\" (UniqueName: \"kubernetes.io/projected/81ec621b-cc30-4ab2-ae0e-bdd71629009f-kube-api-access-wxnpm\") pod \"horizon-6c8cb9d9fb-bt6ls\" (UID: \"81ec621b-cc30-4ab2-ae0e-bdd71629009f\") " pod="openstack/horizon-6c8cb9d9fb-bt6ls"
Oct 03 13:50:28 crc kubenswrapper[4861]: I1003 13:50:28.718577 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-6c8cb9d9fb-bt6ls"
Oct 03 13:50:30 crc kubenswrapper[4861]: I1003 13:50:30.073373 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-76fcf4b695-9ckx2"
Oct 03 13:50:30 crc kubenswrapper[4861]: I1003 13:50:30.138764 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-77585f5f8c-wv5dm"]
Oct 03 13:50:30 crc kubenswrapper[4861]: I1003 13:50:30.139057 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-77585f5f8c-wv5dm" podUID="658e6aab-e882-407b-bd13-1c293b326996" containerName="dnsmasq-dns" containerID="cri-o://fb0d4456c25e1544bde81cd98471695bf986955f49d4344a6205e03f8fd21e05" gracePeriod=10
Oct 03 13:50:30 crc kubenswrapper[4861]: I1003 13:50:30.144822 4861 patch_prober.go:28] interesting pod/machine-config-daemon-t9slw container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 03 13:50:30 crc kubenswrapper[4861]: I1003 13:50:30.144869 4861 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 03 13:50:30 crc kubenswrapper[4861]: I1003 13:50:30.144912 4861 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-t9slw"
Oct 03 13:50:30 crc kubenswrapper[4861]: I1003 13:50:30.145608 4861 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"7c374cec0027a71985e4c5ed0abe80567ca6f2e53b91f0c5eb7af2198510c7d3"} pod="openshift-machine-config-operator/machine-config-daemon-t9slw" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Oct 03 13:50:30 crc kubenswrapper[4861]: I1003 13:50:30.145667 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" containerName="machine-config-daemon" containerID="cri-o://7c374cec0027a71985e4c5ed0abe80567ca6f2e53b91f0c5eb7af2198510c7d3" gracePeriod=600
Oct 03 13:50:30 crc kubenswrapper[4861]: I1003 13:50:30.175812 4861 generic.go:334] "Generic (PLEG): container finished" podID="caf4725e-be55-4527-b7b6-3be4e6e1999d" containerID="00b354772bbc010bd66cf6d4c53c4595fe46beb7ceebef3b61b37fb353fb6846" exitCode=0
Oct 03 13:50:30 crc kubenswrapper[4861]: I1003 13:50:30.175873 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-fb9qp" event={"ID":"caf4725e-be55-4527-b7b6-3be4e6e1999d","Type":"ContainerDied","Data":"00b354772bbc010bd66cf6d4c53c4595fe46beb7ceebef3b61b37fb353fb6846"}
Oct 03 13:50:33 crc kubenswrapper[4861]: I1003 13:50:33.262470 4861 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-77585f5f8c-wv5dm" podUID="658e6aab-e882-407b-bd13-1c293b326996" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.126:5353: connect: connection refused"
Oct 03 13:50:34 crc kubenswrapper[4861]: I1003 13:50:34.210011 4861 generic.go:334] "Generic (PLEG): container finished" podID="658e6aab-e882-407b-bd13-1c293b326996" containerID="fb0d4456c25e1544bde81cd98471695bf986955f49d4344a6205e03f8fd21e05" exitCode=0
Oct 03 13:50:34 crc kubenswrapper[4861]: I1003 13:50:34.210081 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77585f5f8c-wv5dm" event={"ID":"658e6aab-e882-407b-bd13-1c293b326996","Type":"ContainerDied","Data":"fb0d4456c25e1544bde81cd98471695bf986955f49d4344a6205e03f8fd21e05"}
Oct 03 13:50:34 crc kubenswrapper[4861]: I1003 13:50:34.212633 4861 generic.go:334] "Generic (PLEG): container finished" podID="d8335d3f-417e-4114-b306-a3d8f6c31348" containerID="7c374cec0027a71985e4c5ed0abe80567ca6f2e53b91f0c5eb7af2198510c7d3" exitCode=0
Oct 03 13:50:34 crc kubenswrapper[4861]: I1003 13:50:34.212667 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" event={"ID":"d8335d3f-417e-4114-b306-a3d8f6c31348","Type":"ContainerDied","Data":"7c374cec0027a71985e4c5ed0abe80567ca6f2e53b91f0c5eb7af2198510c7d3"}
Oct 03 13:50:34 crc kubenswrapper[4861]: I1003 13:50:34.212906 4861 scope.go:117] "RemoveContainer" containerID="d8c9692c5afc364d3518624df673f7e65a1fe92e46a015f8a19e45916a42b14c"
Oct 03 13:50:38 crc kubenswrapper[4861]: I1003 13:50:38.262764 4861 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-77585f5f8c-wv5dm" podUID="658e6aab-e882-407b-bd13-1c293b326996" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.126:5353: connect: connection refused"
Oct 03 13:50:42 crc kubenswrapper[4861]: I1003 13:50:42.314595 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-fb9qp" event={"ID":"caf4725e-be55-4527-b7b6-3be4e6e1999d","Type":"ContainerDied","Data":"c7e91d94c9e06bc404b4fbc3a0fb5db1ed68896f52e0bb3fff333bf5386cc32f"}
Oct 03 13:50:42 crc kubenswrapper[4861]: I1003 13:50:42.315050 4861 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c7e91d94c9e06bc404b4fbc3a0fb5db1ed68896f52e0bb3fff333bf5386cc32f"
Oct 03 13:50:42 crc kubenswrapper[4861]: I1003 13:50:42.321366 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-lj9xf" event={"ID":"4e178ffa-097e-4f43-a023-dcb3db4f2c6d","Type":"ContainerDied","Data":"e58cf0ec46b9125090740e20879ad9fde1deae800cbb5c471ce7c6e1961116e9"}
Oct 03 13:50:42 crc kubenswrapper[4861]: I1003 13:50:42.321403 4861 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e58cf0ec46b9125090740e20879ad9fde1deae800cbb5c471ce7c6e1961116e9"
Oct 03 13:50:42 crc kubenswrapper[4861]: I1003 13:50:42.360908 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-lj9xf"
Oct 03 13:50:42 crc kubenswrapper[4861]: I1003 13:50:42.367985 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-fb9qp"
Oct 03 13:50:42 crc kubenswrapper[4861]: I1003 13:50:42.390908 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/caf4725e-be55-4527-b7b6-3be4e6e1999d-combined-ca-bundle\") pod \"caf4725e-be55-4527-b7b6-3be4e6e1999d\" (UID: \"caf4725e-be55-4527-b7b6-3be4e6e1999d\") "
Oct 03 13:50:42 crc kubenswrapper[4861]: I1003 13:50:42.390962 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qvww6\" (UniqueName: \"kubernetes.io/projected/caf4725e-be55-4527-b7b6-3be4e6e1999d-kube-api-access-qvww6\") pod \"caf4725e-be55-4527-b7b6-3be4e6e1999d\" (UID: \"caf4725e-be55-4527-b7b6-3be4e6e1999d\") "
Oct 03 13:50:42 crc kubenswrapper[4861]: I1003 13:50:42.390996 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/4e178ffa-097e-4f43-a023-dcb3db4f2c6d-credential-keys\") pod \"4e178ffa-097e-4f43-a023-dcb3db4f2c6d\" (UID: \"4e178ffa-097e-4f43-a023-dcb3db4f2c6d\") "
Oct 03 13:50:42 crc kubenswrapper[4861]: I1003 13:50:42.391045 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-289cb\" (UniqueName: \"kubernetes.io/projected/4e178ffa-097e-4f43-a023-dcb3db4f2c6d-kube-api-access-289cb\") pod \"4e178ffa-097e-4f43-a023-dcb3db4f2c6d\" (UID: \"4e178ffa-097e-4f43-a023-dcb3db4f2c6d\") "
Oct 03 13:50:42 crc kubenswrapper[4861]: I1003 13:50:42.391069 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/4e178ffa-097e-4f43-a023-dcb3db4f2c6d-fernet-keys\") pod \"4e178ffa-097e-4f43-a023-dcb3db4f2c6d\" (UID: \"4e178ffa-097e-4f43-a023-dcb3db4f2c6d\") "
Oct 03 13:50:42 crc kubenswrapper[4861]: I1003 13:50:42.391102 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/caf4725e-be55-4527-b7b6-3be4e6e1999d-db-sync-config-data\") pod \"caf4725e-be55-4527-b7b6-3be4e6e1999d\" (UID: \"caf4725e-be55-4527-b7b6-3be4e6e1999d\") "
Oct 03 13:50:42 crc kubenswrapper[4861]: I1003 13:50:42.391127 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4e178ffa-097e-4f43-a023-dcb3db4f2c6d-scripts\") pod \"4e178ffa-097e-4f43-a023-dcb3db4f2c6d\" (UID: \"4e178ffa-097e-4f43-a023-dcb3db4f2c6d\") "
Oct 03 13:50:42 crc kubenswrapper[4861]: I1003 13:50:42.391149 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/caf4725e-be55-4527-b7b6-3be4e6e1999d-config-data\") pod \"caf4725e-be55-4527-b7b6-3be4e6e1999d\" (UID: \"caf4725e-be55-4527-b7b6-3be4e6e1999d\") "
Oct 03 13:50:42 crc kubenswrapper[4861]: I1003 13:50:42.391183 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e178ffa-097e-4f43-a023-dcb3db4f2c6d-combined-ca-bundle\") pod \"4e178ffa-097e-4f43-a023-dcb3db4f2c6d\" (UID: \"4e178ffa-097e-4f43-a023-dcb3db4f2c6d\") "
Oct 03 13:50:42 crc kubenswrapper[4861]: I1003 13:50:42.391932 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e178ffa-097e-4f43-a023-dcb3db4f2c6d-config-data\") pod \"4e178ffa-097e-4f43-a023-dcb3db4f2c6d\" (UID: \"4e178ffa-097e-4f43-a023-dcb3db4f2c6d\") "
Oct 03 13:50:42 crc kubenswrapper[4861]: I1003 13:50:42.409743 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/caf4725e-be55-4527-b7b6-3be4e6e1999d-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "caf4725e-be55-4527-b7b6-3be4e6e1999d" (UID: "caf4725e-be55-4527-b7b6-3be4e6e1999d"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 13:50:42 crc kubenswrapper[4861]: I1003 13:50:42.409808 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/caf4725e-be55-4527-b7b6-3be4e6e1999d-kube-api-access-qvww6" (OuterVolumeSpecName: "kube-api-access-qvww6") pod "caf4725e-be55-4527-b7b6-3be4e6e1999d" (UID: "caf4725e-be55-4527-b7b6-3be4e6e1999d"). InnerVolumeSpecName "kube-api-access-qvww6". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 13:50:42 crc kubenswrapper[4861]: I1003 13:50:42.422953 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e178ffa-097e-4f43-a023-dcb3db4f2c6d-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "4e178ffa-097e-4f43-a023-dcb3db4f2c6d" (UID: "4e178ffa-097e-4f43-a023-dcb3db4f2c6d"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 13:50:42 crc kubenswrapper[4861]: I1003 13:50:42.428321 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4e178ffa-097e-4f43-a023-dcb3db4f2c6d-kube-api-access-289cb" (OuterVolumeSpecName: "kube-api-access-289cb") pod "4e178ffa-097e-4f43-a023-dcb3db4f2c6d" (UID: "4e178ffa-097e-4f43-a023-dcb3db4f2c6d"). InnerVolumeSpecName "kube-api-access-289cb". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 13:50:42 crc kubenswrapper[4861]: I1003 13:50:42.430784 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e178ffa-097e-4f43-a023-dcb3db4f2c6d-scripts" (OuterVolumeSpecName: "scripts") pod "4e178ffa-097e-4f43-a023-dcb3db4f2c6d" (UID: "4e178ffa-097e-4f43-a023-dcb3db4f2c6d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 13:50:42 crc kubenswrapper[4861]: I1003 13:50:42.462501 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e178ffa-097e-4f43-a023-dcb3db4f2c6d-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "4e178ffa-097e-4f43-a023-dcb3db4f2c6d" (UID: "4e178ffa-097e-4f43-a023-dcb3db4f2c6d"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 13:50:42 crc kubenswrapper[4861]: I1003 13:50:42.463340 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/caf4725e-be55-4527-b7b6-3be4e6e1999d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "caf4725e-be55-4527-b7b6-3be4e6e1999d" (UID: "caf4725e-be55-4527-b7b6-3be4e6e1999d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 13:50:42 crc kubenswrapper[4861]: I1003 13:50:42.486754 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e178ffa-097e-4f43-a023-dcb3db4f2c6d-config-data" (OuterVolumeSpecName: "config-data") pod "4e178ffa-097e-4f43-a023-dcb3db4f2c6d" (UID: "4e178ffa-097e-4f43-a023-dcb3db4f2c6d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 13:50:42 crc kubenswrapper[4861]: I1003 13:50:42.494211 4861 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/4e178ffa-097e-4f43-a023-dcb3db4f2c6d-credential-keys\") on node \"crc\" DevicePath \"\""
Oct 03 13:50:42 crc kubenswrapper[4861]: I1003 13:50:42.494272 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-289cb\" (UniqueName: \"kubernetes.io/projected/4e178ffa-097e-4f43-a023-dcb3db4f2c6d-kube-api-access-289cb\") on node \"crc\" DevicePath \"\""
Oct 03 13:50:42 crc kubenswrapper[4861]: I1003 13:50:42.494286 4861 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/4e178ffa-097e-4f43-a023-dcb3db4f2c6d-fernet-keys\") on node \"crc\" DevicePath \"\""
Oct 03 13:50:42 crc kubenswrapper[4861]: I1003 13:50:42.494296 4861 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/caf4725e-be55-4527-b7b6-3be4e6e1999d-db-sync-config-data\") on node \"crc\" DevicePath \"\""
Oct 03 13:50:42 crc kubenswrapper[4861]: I1003 13:50:42.494307 4861 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4e178ffa-097e-4f43-a023-dcb3db4f2c6d-scripts\") on node \"crc\" DevicePath \"\""
Oct 03 13:50:42 crc kubenswrapper[4861]: I1003 13:50:42.494316 4861 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e178ffa-097e-4f43-a023-dcb3db4f2c6d-config-data\") on node \"crc\" DevicePath \"\""
Oct 03 13:50:42 crc kubenswrapper[4861]: I1003 13:50:42.494324 4861 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/caf4725e-be55-4527-b7b6-3be4e6e1999d-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 03 13:50:42 crc kubenswrapper[4861]: I1003 13:50:42.494333 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qvww6\" (UniqueName: \"kubernetes.io/projected/caf4725e-be55-4527-b7b6-3be4e6e1999d-kube-api-access-qvww6\") on node \"crc\" DevicePath \"\""
Oct 03 13:50:42 crc kubenswrapper[4861]: I1003 13:50:42.508829 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e178ffa-097e-4f43-a023-dcb3db4f2c6d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4e178ffa-097e-4f43-a023-dcb3db4f2c6d" (UID: "4e178ffa-097e-4f43-a023-dcb3db4f2c6d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 13:50:42 crc kubenswrapper[4861]: I1003 13:50:42.537259 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/caf4725e-be55-4527-b7b6-3be4e6e1999d-config-data" (OuterVolumeSpecName: "config-data") pod "caf4725e-be55-4527-b7b6-3be4e6e1999d" (UID: "caf4725e-be55-4527-b7b6-3be4e6e1999d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 13:50:42 crc kubenswrapper[4861]: I1003 13:50:42.595601 4861 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/caf4725e-be55-4527-b7b6-3be4e6e1999d-config-data\") on node \"crc\" DevicePath \"\""
Oct 03 13:50:42 crc kubenswrapper[4861]: I1003 13:50:42.595833 4861 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e178ffa-097e-4f43-a023-dcb3db4f2c6d-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 03 13:50:43 crc kubenswrapper[4861]: I1003 13:50:43.328901 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-lj9xf"
Oct 03 13:50:43 crc kubenswrapper[4861]: I1003 13:50:43.328910 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-fb9qp"
Oct 03 13:50:43 crc kubenswrapper[4861]: I1003 13:50:43.462740 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-lj9xf"]
Oct 03 13:50:43 crc kubenswrapper[4861]: I1003 13:50:43.468929 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-lj9xf"]
Oct 03 13:50:43 crc kubenswrapper[4861]: I1003 13:50:43.550375 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-cmgtn"]
Oct 03 13:50:43 crc kubenswrapper[4861]: E1003 13:50:43.551105 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="caf4725e-be55-4527-b7b6-3be4e6e1999d" containerName="glance-db-sync"
Oct 03 13:50:43 crc kubenswrapper[4861]: I1003 13:50:43.551189 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="caf4725e-be55-4527-b7b6-3be4e6e1999d" containerName="glance-db-sync"
Oct 03 13:50:43 crc kubenswrapper[4861]: E1003 13:50:43.551274 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e178ffa-097e-4f43-a023-dcb3db4f2c6d" containerName="keystone-bootstrap"
Oct 03 13:50:43 crc kubenswrapper[4861]: I1003 13:50:43.551343 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e178ffa-097e-4f43-a023-dcb3db4f2c6d" containerName="keystone-bootstrap"
Oct 03 13:50:43 crc kubenswrapper[4861]: I1003 13:50:43.551623 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="caf4725e-be55-4527-b7b6-3be4e6e1999d" containerName="glance-db-sync"
Oct 03 13:50:43 crc kubenswrapper[4861]: I1003 13:50:43.552312 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="4e178ffa-097e-4f43-a023-dcb3db4f2c6d" containerName="keystone-bootstrap"
Oct 03 13:50:43 crc kubenswrapper[4861]: I1003 13:50:43.553535 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-cmgtn"
Oct 03 13:50:43 crc kubenswrapper[4861]: I1003 13:50:43.555670 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts"
Oct 03 13:50:43 crc kubenswrapper[4861]: I1003 13:50:43.556125 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-x97lk"
Oct 03 13:50:43 crc kubenswrapper[4861]: I1003 13:50:43.557515 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-cmgtn"]
Oct 03 13:50:43 crc kubenswrapper[4861]: I1003 13:50:43.557554 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data"
Oct 03 13:50:43 crc kubenswrapper[4861]: I1003 13:50:43.558722 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone"
Oct 03 13:50:43 crc kubenswrapper[4861]: I1003 13:50:43.616727 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dd37928f-4e28-4ff6-b3bf-5baa2941c432-scripts\") pod \"keystone-bootstrap-cmgtn\" (UID: \"dd37928f-4e28-4ff6-b3bf-5baa2941c432\") " pod="openstack/keystone-bootstrap-cmgtn"
Oct 03 13:50:43 crc kubenswrapper[4861]: I1003 13:50:43.616810 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/dd37928f-4e28-4ff6-b3bf-5baa2941c432-credential-keys\") pod \"keystone-bootstrap-cmgtn\" (UID: \"dd37928f-4e28-4ff6-b3bf-5baa2941c432\") " pod="openstack/keystone-bootstrap-cmgtn"
Oct 03 13:50:43 crc kubenswrapper[4861]: I1003 13:50:43.616849 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dd37928f-4e28-4ff6-b3bf-5baa2941c432-config-data\") pod \"keystone-bootstrap-cmgtn\" (UID: \"dd37928f-4e28-4ff6-b3bf-5baa2941c432\") " pod="openstack/keystone-bootstrap-cmgtn"
Oct 03 13:50:43 crc kubenswrapper[4861]: I1003 13:50:43.616897 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kf99d\" (UniqueName: \"kubernetes.io/projected/dd37928f-4e28-4ff6-b3bf-5baa2941c432-kube-api-access-kf99d\") pod \"keystone-bootstrap-cmgtn\" (UID: \"dd37928f-4e28-4ff6-b3bf-5baa2941c432\") " pod="openstack/keystone-bootstrap-cmgtn"
Oct 03 13:50:43 crc kubenswrapper[4861]: I1003 13:50:43.616946 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd37928f-4e28-4ff6-b3bf-5baa2941c432-combined-ca-bundle\") pod \"keystone-bootstrap-cmgtn\" (UID: \"dd37928f-4e28-4ff6-b3bf-5baa2941c432\") " pod="openstack/keystone-bootstrap-cmgtn"
Oct 03 13:50:43 crc kubenswrapper[4861]: I1003 13:50:43.617023 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/dd37928f-4e28-4ff6-b3bf-5baa2941c432-fernet-keys\") pod \"keystone-bootstrap-cmgtn\" (UID: \"dd37928f-4e28-4ff6-b3bf-5baa2941c432\") " pod="openstack/keystone-bootstrap-cmgtn"
Oct 03 13:50:43 crc kubenswrapper[4861]: I1003 13:50:43.718474 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/dd37928f-4e28-4ff6-b3bf-5baa2941c432-fernet-keys\") pod \"keystone-bootstrap-cmgtn\" (UID: \"dd37928f-4e28-4ff6-b3bf-5baa2941c432\") " pod="openstack/keystone-bootstrap-cmgtn"
Oct 03 13:50:43 crc kubenswrapper[4861]: I1003 13:50:43.718576 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dd37928f-4e28-4ff6-b3bf-5baa2941c432-scripts\") pod \"keystone-bootstrap-cmgtn\" (UID: \"dd37928f-4e28-4ff6-b3bf-5baa2941c432\") " pod="openstack/keystone-bootstrap-cmgtn"
Oct 03 13:50:43 crc kubenswrapper[4861]: I1003 13:50:43.718621 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/dd37928f-4e28-4ff6-b3bf-5baa2941c432-credential-keys\") pod \"keystone-bootstrap-cmgtn\" (UID: \"dd37928f-4e28-4ff6-b3bf-5baa2941c432\") " pod="openstack/keystone-bootstrap-cmgtn"
Oct 03 13:50:43 crc kubenswrapper[4861]: I1003 13:50:43.718642 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dd37928f-4e28-4ff6-b3bf-5baa2941c432-config-data\") pod \"keystone-bootstrap-cmgtn\" (UID: \"dd37928f-4e28-4ff6-b3bf-5baa2941c432\") " pod="openstack/keystone-bootstrap-cmgtn"
Oct 03 13:50:43 crc kubenswrapper[4861]: I1003 13:50:43.718684 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kf99d\" (UniqueName: \"kubernetes.io/projected/dd37928f-4e28-4ff6-b3bf-5baa2941c432-kube-api-access-kf99d\") pod \"keystone-bootstrap-cmgtn\" (UID: \"dd37928f-4e28-4ff6-b3bf-5baa2941c432\") " pod="openstack/keystone-bootstrap-cmgtn"
Oct 03 13:50:43 crc kubenswrapper[4861]: I1003 13:50:43.718717 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd37928f-4e28-4ff6-b3bf-5baa2941c432-combined-ca-bundle\") pod \"keystone-bootstrap-cmgtn\" (UID: \"dd37928f-4e28-4ff6-b3bf-5baa2941c432\") " pod="openstack/keystone-bootstrap-cmgtn"
Oct 03 13:50:43 crc kubenswrapper[4861]: I1003 13:50:43.724673 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/dd37928f-4e28-4ff6-b3bf-5baa2941c432-credential-keys\") pod \"keystone-bootstrap-cmgtn\" (UID: \"dd37928f-4e28-4ff6-b3bf-5baa2941c432\") " pod="openstack/keystone-bootstrap-cmgtn"
Oct 03 13:50:43 crc kubenswrapper[4861]: I1003 13:50:43.732871 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dd37928f-4e28-4ff6-b3bf-5baa2941c432-config-data\") pod \"keystone-bootstrap-cmgtn\" (UID: \"dd37928f-4e28-4ff6-b3bf-5baa2941c432\") " pod="openstack/keystone-bootstrap-cmgtn"
Oct 03 13:50:43 crc kubenswrapper[4861]: I1003 13:50:43.734541 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd37928f-4e28-4ff6-b3bf-5baa2941c432-combined-ca-bundle\") pod \"keystone-bootstrap-cmgtn\" (UID: \"dd37928f-4e28-4ff6-b3bf-5baa2941c432\") " pod="openstack/keystone-bootstrap-cmgtn"
Oct 03 13:50:43 crc kubenswrapper[4861]: I1003 13:50:43.740534 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dd37928f-4e28-4ff6-b3bf-5baa2941c432-scripts\") pod \"keystone-bootstrap-cmgtn\" (UID: \"dd37928f-4e28-4ff6-b3bf-5baa2941c432\") " pod="openstack/keystone-bootstrap-cmgtn"
Oct 03 13:50:43 crc kubenswrapper[4861]: I1003 13:50:43.749608 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kf99d\" (UniqueName: \"kubernetes.io/projected/dd37928f-4e28-4ff6-b3bf-5baa2941c432-kube-api-access-kf99d\") pod \"keystone-bootstrap-cmgtn\" (UID: \"dd37928f-4e28-4ff6-b3bf-5baa2941c432\") " pod="openstack/keystone-bootstrap-cmgtn"
Oct 03 13:50:43 crc kubenswrapper[4861]: I1003 13:50:43.749705 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/dd37928f-4e28-4ff6-b3bf-5baa2941c432-fernet-keys\") pod \"keystone-bootstrap-cmgtn\" (UID: \"dd37928f-4e28-4ff6-b3bf-5baa2941c432\") " pod="openstack/keystone-bootstrap-cmgtn"
Oct 03 13:50:43 crc kubenswrapper[4861]: I1003 13:50:43.869708 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-cmgtn"
Oct 03 13:50:44 crc kubenswrapper[4861]: I1003 13:50:43.997474 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-8b5c85b87-wp2rm"]
Oct 03 13:50:44 crc kubenswrapper[4861]: I1003 13:50:43.999761 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8b5c85b87-wp2rm"
Oct 03 13:50:44 crc kubenswrapper[4861]: I1003 13:50:44.017089 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8b5c85b87-wp2rm"]
Oct 03 13:50:44 crc kubenswrapper[4861]: I1003 13:50:44.143833 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5j56b\" (UniqueName: \"kubernetes.io/projected/915a3de5-7d96-4d7d-83fa-96835dfdd0c1-kube-api-access-5j56b\") pod \"dnsmasq-dns-8b5c85b87-wp2rm\" (UID: \"915a3de5-7d96-4d7d-83fa-96835dfdd0c1\") " pod="openstack/dnsmasq-dns-8b5c85b87-wp2rm"
Oct 03 13:50:44 crc kubenswrapper[4861]: I1003 13:50:44.143932 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/915a3de5-7d96-4d7d-83fa-96835dfdd0c1-ovsdbserver-sb\") pod \"dnsmasq-dns-8b5c85b87-wp2rm\" (UID: \"915a3de5-7d96-4d7d-83fa-96835dfdd0c1\") " pod="openstack/dnsmasq-dns-8b5c85b87-wp2rm"
Oct 03 13:50:44 crc kubenswrapper[4861]: I1003 13:50:44.143994 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/915a3de5-7d96-4d7d-83fa-96835dfdd0c1-ovsdbserver-nb\") pod \"dnsmasq-dns-8b5c85b87-wp2rm\" (UID: \"915a3de5-7d96-4d7d-83fa-96835dfdd0c1\") " pod="openstack/dnsmasq-dns-8b5c85b87-wp2rm"
Oct 03 13:50:44 crc kubenswrapper[4861]: I1003 13:50:44.144039 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/915a3de5-7d96-4d7d-83fa-96835dfdd0c1-config\") pod \"dnsmasq-dns-8b5c85b87-wp2rm\" (UID: \"915a3de5-7d96-4d7d-83fa-96835dfdd0c1\") " pod="openstack/dnsmasq-dns-8b5c85b87-wp2rm"
Oct 03 13:50:44 crc kubenswrapper[4861]: I1003 13:50:44.144067 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/915a3de5-7d96-4d7d-83fa-96835dfdd0c1-dns-svc\") pod \"dnsmasq-dns-8b5c85b87-wp2rm\" (UID: \"915a3de5-7d96-4d7d-83fa-96835dfdd0c1\") " pod="openstack/dnsmasq-dns-8b5c85b87-wp2rm"
Oct 03 13:50:44 crc kubenswrapper[4861]: I1003 13:50:44.144086 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/915a3de5-7d96-4d7d-83fa-96835dfdd0c1-dns-swift-storage-0\") pod \"dnsmasq-dns-8b5c85b87-wp2rm\" (UID: \"915a3de5-7d96-4d7d-83fa-96835dfdd0c1\") " pod="openstack/dnsmasq-dns-8b5c85b87-wp2rm"
Oct 03 13:50:44 crc kubenswrapper[4861]: I1003 13:50:44.245167 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/915a3de5-7d96-4d7d-83fa-96835dfdd0c1-ovsdbserver-sb\") pod \"dnsmasq-dns-8b5c85b87-wp2rm\" (UID: \"915a3de5-7d96-4d7d-83fa-96835dfdd0c1\") " pod="openstack/dnsmasq-dns-8b5c85b87-wp2rm"
Oct 03 13:50:44 crc kubenswrapper[4861]: I1003 13:50:44.245254 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/915a3de5-7d96-4d7d-83fa-96835dfdd0c1-ovsdbserver-nb\") pod \"dnsmasq-dns-8b5c85b87-wp2rm\" (UID: \"915a3de5-7d96-4d7d-83fa-96835dfdd0c1\") " pod="openstack/dnsmasq-dns-8b5c85b87-wp2rm"
Oct 03 13:50:44 crc kubenswrapper[4861]: I1003 13:50:44.245284 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/915a3de5-7d96-4d7d-83fa-96835dfdd0c1-config\") pod \"dnsmasq-dns-8b5c85b87-wp2rm\" (UID: \"915a3de5-7d96-4d7d-83fa-96835dfdd0c1\") " pod="openstack/dnsmasq-dns-8b5c85b87-wp2rm"
Oct 03 13:50:44 crc kubenswrapper[4861]: I1003 13:50:44.245309 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/915a3de5-7d96-4d7d-83fa-96835dfdd0c1-dns-svc\") pod \"dnsmasq-dns-8b5c85b87-wp2rm\" (UID: \"915a3de5-7d96-4d7d-83fa-96835dfdd0c1\") " pod="openstack/dnsmasq-dns-8b5c85b87-wp2rm"
Oct 03 13:50:44 crc kubenswrapper[4861]: I1003 13:50:44.245333 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/915a3de5-7d96-4d7d-83fa-96835dfdd0c1-dns-swift-storage-0\") pod \"dnsmasq-dns-8b5c85b87-wp2rm\" (UID: \"915a3de5-7d96-4d7d-83fa-96835dfdd0c1\") " pod="openstack/dnsmasq-dns-8b5c85b87-wp2rm"
Oct 03 13:50:44 crc kubenswrapper[4861]: I1003 13:50:44.245420 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5j56b\" (UniqueName: \"kubernetes.io/projected/915a3de5-7d96-4d7d-83fa-96835dfdd0c1-kube-api-access-5j56b\") pod \"dnsmasq-dns-8b5c85b87-wp2rm\" (UID: \"915a3de5-7d96-4d7d-83fa-96835dfdd0c1\") " pod="openstack/dnsmasq-dns-8b5c85b87-wp2rm"
Oct 03 13:50:44 crc kubenswrapper[4861]: I1003 13:50:44.246325 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/915a3de5-7d96-4d7d-83fa-96835dfdd0c1-ovsdbserver-sb\") pod \"dnsmasq-dns-8b5c85b87-wp2rm\" (UID: \"915a3de5-7d96-4d7d-83fa-96835dfdd0c1\") " pod="openstack/dnsmasq-dns-8b5c85b87-wp2rm"
Oct 03 13:50:44 crc kubenswrapper[4861]: I1003 13:50:44.246384 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/915a3de5-7d96-4d7d-83fa-96835dfdd0c1-ovsdbserver-nb\") pod \"dnsmasq-dns-8b5c85b87-wp2rm\" (UID: \"915a3de5-7d96-4d7d-83fa-96835dfdd0c1\") " pod="openstack/dnsmasq-dns-8b5c85b87-wp2rm"
Oct 03 13:50:44 crc kubenswrapper[4861]: I1003 13:50:44.247016 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/915a3de5-7d96-4d7d-83fa-96835dfdd0c1-dns-svc\") pod \"dnsmasq-dns-8b5c85b87-wp2rm\" (UID: \"915a3de5-7d96-4d7d-83fa-96835dfdd0c1\") " pod="openstack/dnsmasq-dns-8b5c85b87-wp2rm"
Oct 03 13:50:44 crc kubenswrapper[4861]: I1003 13:50:44.247113 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/915a3de5-7d96-4d7d-83fa-96835dfdd0c1-dns-swift-storage-0\") pod \"dnsmasq-dns-8b5c85b87-wp2rm\" (UID: \"915a3de5-7d96-4d7d-83fa-96835dfdd0c1\") " pod="openstack/dnsmasq-dns-8b5c85b87-wp2rm"
Oct 03 13:50:44 crc kubenswrapper[4861]: I1003 13:50:44.248003 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/915a3de5-7d96-4d7d-83fa-96835dfdd0c1-config\") pod \"dnsmasq-dns-8b5c85b87-wp2rm\" (UID: \"915a3de5-7d96-4d7d-83fa-96835dfdd0c1\") " pod="openstack/dnsmasq-dns-8b5c85b87-wp2rm"
Oct 03 13:50:44 crc kubenswrapper[4861]: I1003 13:50:44.281057 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5j56b\" (UniqueName: \"kubernetes.io/projected/915a3de5-7d96-4d7d-83fa-96835dfdd0c1-kube-api-access-5j56b\") pod \"dnsmasq-dns-8b5c85b87-wp2rm\" (UID: \"915a3de5-7d96-4d7d-83fa-96835dfdd0c1\") " pod="openstack/dnsmasq-dns-8b5c85b87-wp2rm"
Oct 03 13:50:44 crc kubenswrapper[4861]: I1003 13:50:44.359459 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8b5c85b87-wp2rm"
Oct 03 13:50:44 crc kubenswrapper[4861]: I1003 13:50:44.701276 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4e178ffa-097e-4f43-a023-dcb3db4f2c6d" path="/var/lib/kubelet/pods/4e178ffa-097e-4f43-a023-dcb3db4f2c6d/volumes"
Oct 03 13:50:44 crc kubenswrapper[4861]: I1003 13:50:44.883529 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"]
Oct 03 13:50:44 crc kubenswrapper[4861]: I1003 13:50:44.885966 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Oct 03 13:50:44 crc kubenswrapper[4861]: I1003 13:50:44.888122 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-whl4w"
Oct 03 13:50:44 crc kubenswrapper[4861]: I1003 13:50:44.889665 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data"
Oct 03 13:50:44 crc kubenswrapper[4861]: I1003 13:50:44.889927 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts"
Oct 03 13:50:44 crc kubenswrapper[4861]: I1003 13:50:44.909082 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"]
Oct 03 13:50:44 crc kubenswrapper[4861]: E1003 13:50:44.935831 4861 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-placement-api:current-podified"
Oct 03 13:50:44 crc kubenswrapper[4861]: E1003 13:50:44.935984 4861 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:placement-db-sync,Image:quay.io/podified-antelope-centos9/openstack-placement-api:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:true,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/placement,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:false,MountPath:/var/lib/openstack/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:placement-dbsync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-x5l5f,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42482,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-db-sync-f6j5h_openstack(7b148d26-3aac-44de-9776-c03b03c5fff2): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Oct 03 13:50:44 crc kubenswrapper[4861]: E1003 13:50:44.937193 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"placement-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/placement-db-sync-f6j5h" podUID="7b148d26-3aac-44de-9776-c03b03c5fff2"
Oct 03 13:50:45 crc kubenswrapper[4861]: I1003 13:50:45.061375 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d4fbbbc0-39de-4055-8a23-7ed84b839b41-config-data\") pod \"glance-default-external-api-0\" (UID: \"d4fbbbc0-39de-4055-8a23-7ed84b839b41\") " pod="openstack/glance-default-external-api-0"
Oct 03 13:50:45 crc kubenswrapper[4861]: I1003 13:50:45.061652 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-external-api-0\" (UID: \"d4fbbbc0-39de-4055-8a23-7ed84b839b41\") " pod="openstack/glance-default-external-api-0"
Oct 03 13:50:45 crc kubenswrapper[4861]: I1003 13:50:45.061683 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d4fbbbc0-39de-4055-8a23-7ed84b839b41-logs\") pod \"glance-default-external-api-0\" (UID: \"d4fbbbc0-39de-4055-8a23-7ed84b839b41\") " pod="openstack/glance-default-external-api-0"
Oct 03 13:50:45 crc kubenswrapper[4861]: I1003 13:50:45.061747 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d4fbbbc0-39de-4055-8a23-7ed84b839b41-scripts\") pod \"glance-default-external-api-0\" (UID: \"d4fbbbc0-39de-4055-8a23-7ed84b839b41\") " pod="openstack/glance-default-external-api-0"
Oct 03 13:50:45 crc kubenswrapper[4861]: I1003 13:50:45.061799 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f6c4z\" (UniqueName: \"kubernetes.io/projected/d4fbbbc0-39de-4055-8a23-7ed84b839b41-kube-api-access-f6c4z\") pod \"glance-default-external-api-0\" (UID: \"d4fbbbc0-39de-4055-8a23-7ed84b839b41\") " pod="openstack/glance-default-external-api-0"
Oct 03 13:50:45 crc kubenswrapper[4861]: I1003 13:50:45.061823 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4fbbbc0-39de-4055-8a23-7ed84b839b41-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"d4fbbbc0-39de-4055-8a23-7ed84b839b41\") " pod="openstack/glance-default-external-api-0"
Oct 03 13:50:45 crc kubenswrapper[4861]: I1003 13:50:45.061844 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d4fbbbc0-39de-4055-8a23-7ed84b839b41-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"d4fbbbc0-39de-4055-8a23-7ed84b839b41\") " pod="openstack/glance-default-external-api-0"
Oct 03 13:50:45 crc kubenswrapper[4861]: I1003 13:50:45.163064 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f6c4z\" (UniqueName: \"kubernetes.io/projected/d4fbbbc0-39de-4055-8a23-7ed84b839b41-kube-api-access-f6c4z\") pod \"glance-default-external-api-0\" (UID: \"d4fbbbc0-39de-4055-8a23-7ed84b839b41\") " pod="openstack/glance-default-external-api-0"
Oct 03 13:50:45 crc kubenswrapper[4861]: I1003 13:50:45.163108 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4fbbbc0-39de-4055-8a23-7ed84b839b41-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"d4fbbbc0-39de-4055-8a23-7ed84b839b41\") " pod="openstack/glance-default-external-api-0"
Oct 03 13:50:45 crc kubenswrapper[4861]: I1003 13:50:45.163127 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d4fbbbc0-39de-4055-8a23-7ed84b839b41-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"d4fbbbc0-39de-4055-8a23-7ed84b839b41\") " pod="openstack/glance-default-external-api-0"
Oct 03 13:50:45 crc kubenswrapper[4861]: I1003 13:50:45.163177 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d4fbbbc0-39de-4055-8a23-7ed84b839b41-config-data\") pod \"glance-default-external-api-0\" (UID: \"d4fbbbc0-39de-4055-8a23-7ed84b839b41\") " pod="openstack/glance-default-external-api-0"
Oct 03 13:50:45 crc kubenswrapper[4861]: I1003 13:50:45.163211 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-external-api-0\" (UID: \"d4fbbbc0-39de-4055-8a23-7ed84b839b41\") " pod="openstack/glance-default-external-api-0"
Oct 03 13:50:45 crc kubenswrapper[4861]: I1003 13:50:45.163245 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d4fbbbc0-39de-4055-8a23-7ed84b839b41-logs\") pod \"glance-default-external-api-0\" (UID: \"d4fbbbc0-39de-4055-8a23-7ed84b839b41\") " pod="openstack/glance-default-external-api-0"
Oct 03 13:50:45 crc kubenswrapper[4861]: I1003 13:50:45.163291 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d4fbbbc0-39de-4055-8a23-7ed84b839b41-scripts\") pod \"glance-default-external-api-0\" (UID: \"d4fbbbc0-39de-4055-8a23-7ed84b839b41\") " pod="openstack/glance-default-external-api-0"
Oct 03 13:50:45 crc kubenswrapper[4861]: I1003 13:50:45.164454 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d4fbbbc0-39de-4055-8a23-7ed84b839b41-logs\") pod \"glance-default-external-api-0\" (UID: \"d4fbbbc0-39de-4055-8a23-7ed84b839b41\") " pod="openstack/glance-default-external-api-0"
Oct 03 13:50:45 crc kubenswrapper[4861]: I1003 13:50:45.164443 4861 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-external-api-0\" (UID: \"d4fbbbc0-39de-4055-8a23-7ed84b839b41\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/glance-default-external-api-0"
Oct 03 13:50:45 crc kubenswrapper[4861]: I1003 13:50:45.169617 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d4fbbbc0-39de-4055-8a23-7ed84b839b41-config-data\") pod \"glance-default-external-api-0\" (UID: \"d4fbbbc0-39de-4055-8a23-7ed84b839b41\") " pod="openstack/glance-default-external-api-0"
Oct 03 13:50:45 crc kubenswrapper[4861]: I1003 13:50:45.176992 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d4fbbbc0-39de-4055-8a23-7ed84b839b41-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"d4fbbbc0-39de-4055-8a23-7ed84b839b41\") " pod="openstack/glance-default-external-api-0"
Oct 03 13:50:45 crc kubenswrapper[4861]: I1003 13:50:45.177825 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d4fbbbc0-39de-4055-8a23-7ed84b839b41-scripts\") pod \"glance-default-external-api-0\" (UID: \"d4fbbbc0-39de-4055-8a23-7ed84b839b41\") " pod="openstack/glance-default-external-api-0"
Oct 03 13:50:45 crc kubenswrapper[4861]: I1003 13:50:45.177971 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4fbbbc0-39de-4055-8a23-7ed84b839b41-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"d4fbbbc0-39de-4055-8a23-7ed84b839b41\") " pod="openstack/glance-default-external-api-0"
Oct 03 13:50:45 crc kubenswrapper[4861]: I1003 13:50:45.181692 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f6c4z\" (UniqueName: \"kubernetes.io/projected/d4fbbbc0-39de-4055-8a23-7ed84b839b41-kube-api-access-f6c4z\") pod \"glance-default-external-api-0\" (UID: \"d4fbbbc0-39de-4055-8a23-7ed84b839b41\") " pod="openstack/glance-default-external-api-0"
Oct 03 13:50:45 crc kubenswrapper[4861]: I1003 13:50:45.208819 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-external-api-0\" (UID: \"d4fbbbc0-39de-4055-8a23-7ed84b839b41\") " pod="openstack/glance-default-external-api-0"
Oct 03 13:50:45 crc kubenswrapper[4861]: I1003 13:50:45.332485 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"]
Oct 03 13:50:45 crc kubenswrapper[4861]: I1003 13:50:45.334033 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Oct 03 13:50:45 crc kubenswrapper[4861]: I1003 13:50:45.337006 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data"
Oct 03 13:50:45 crc kubenswrapper[4861]: I1003 13:50:45.350730 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"]
Oct 03 13:50:45 crc kubenswrapper[4861]: E1003 13:50:45.386880 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"placement-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-placement-api:current-podified\\\"\"" pod="openstack/placement-db-sync-f6j5h" podUID="7b148d26-3aac-44de-9776-c03b03c5fff2"
Oct 03 13:50:45 crc kubenswrapper[4861]: I1003 13:50:45.479797 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c2c0f80-1867-4d5e-a3fb-76c84f2cf825-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"5c2c0f80-1867-4d5e-a3fb-76c84f2cf825\") " pod="openstack/glance-default-internal-api-0"
Oct 03 13:50:45 crc kubenswrapper[4861]: I1003 13:50:45.479854 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-internal-api-0\" (UID: \"5c2c0f80-1867-4d5e-a3fb-76c84f2cf825\") " pod="openstack/glance-default-internal-api-0"
Oct 03 13:50:45 crc kubenswrapper[4861]: I1003 13:50:45.479893 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5c2c0f80-1867-4d5e-a3fb-76c84f2cf825-scripts\") pod \"glance-default-internal-api-0\" (UID: \"5c2c0f80-1867-4d5e-a3fb-76c84f2cf825\") " pod="openstack/glance-default-internal-api-0"
Oct 03 13:50:45 crc kubenswrapper[4861]: I1003 13:50:45.479918 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5c2c0f80-1867-4d5e-a3fb-76c84f2cf825-logs\") pod \"glance-default-internal-api-0\" (UID: \"5c2c0f80-1867-4d5e-a3fb-76c84f2cf825\") " pod="openstack/glance-default-internal-api-0"
Oct 03 13:50:45 crc kubenswrapper[4861]: I1003 13:50:45.479935 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c2c0f80-1867-4d5e-a3fb-76c84f2cf825-config-data\") pod \"glance-default-internal-api-0\" (UID: \"5c2c0f80-1867-4d5e-a3fb-76c84f2cf825\") " pod="openstack/glance-default-internal-api-0"
Oct 03 13:50:45 crc kubenswrapper[4861]: I1003 13:50:45.479986 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7zx7v\" (UniqueName: \"kubernetes.io/projected/5c2c0f80-1867-4d5e-a3fb-76c84f2cf825-kube-api-access-7zx7v\") pod \"glance-default-internal-api-0\" (UID: \"5c2c0f80-1867-4d5e-a3fb-76c84f2cf825\") " pod="openstack/glance-default-internal-api-0"
Oct 03 13:50:45 crc kubenswrapper[4861]: I1003 13:50:45.480006 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5c2c0f80-1867-4d5e-a3fb-76c84f2cf825-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"5c2c0f80-1867-4d5e-a3fb-76c84f2cf825\") " pod="openstack/glance-default-internal-api-0"
Oct 03 13:50:45 crc kubenswrapper[4861]: I1003 13:50:45.508184 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Oct 03 13:50:45 crc kubenswrapper[4861]: I1003 13:50:45.582030 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7zx7v\" (UniqueName: \"kubernetes.io/projected/5c2c0f80-1867-4d5e-a3fb-76c84f2cf825-kube-api-access-7zx7v\") pod \"glance-default-internal-api-0\" (UID: \"5c2c0f80-1867-4d5e-a3fb-76c84f2cf825\") " pod="openstack/glance-default-internal-api-0"
Oct 03 13:50:45 crc kubenswrapper[4861]: I1003 13:50:45.582074 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5c2c0f80-1867-4d5e-a3fb-76c84f2cf825-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"5c2c0f80-1867-4d5e-a3fb-76c84f2cf825\") " pod="openstack/glance-default-internal-api-0"
Oct 03 13:50:45 crc kubenswrapper[4861]: I1003 13:50:45.582212 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c2c0f80-1867-4d5e-a3fb-76c84f2cf825-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"5c2c0f80-1867-4d5e-a3fb-76c84f2cf825\") " pod="openstack/glance-default-internal-api-0"
Oct 03 13:50:45 crc kubenswrapper[4861]: I1003 13:50:45.582264 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-internal-api-0\" (UID: \"5c2c0f80-1867-4d5e-a3fb-76c84f2cf825\") " pod="openstack/glance-default-internal-api-0"
Oct 03 13:50:45 crc kubenswrapper[4861]: I1003 13:50:45.582316 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5c2c0f80-1867-4d5e-a3fb-76c84f2cf825-scripts\") pod \"glance-default-internal-api-0\" (UID: \"5c2c0f80-1867-4d5e-a3fb-76c84f2cf825\") " pod="openstack/glance-default-internal-api-0"
Oct 03 13:50:45 crc kubenswrapper[4861]: I1003 13:50:45.582347 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5c2c0f80-1867-4d5e-a3fb-76c84f2cf825-logs\") pod \"glance-default-internal-api-0\" (UID: \"5c2c0f80-1867-4d5e-a3fb-76c84f2cf825\") " pod="openstack/glance-default-internal-api-0"
Oct 03 13:50:45 crc kubenswrapper[4861]: I1003 13:50:45.582371 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c2c0f80-1867-4d5e-a3fb-76c84f2cf825-config-data\") pod \"glance-default-internal-api-0\" (UID: \"5c2c0f80-1867-4d5e-a3fb-76c84f2cf825\") " pod="openstack/glance-default-internal-api-0"
Oct 03 13:50:45 crc kubenswrapper[4861]: I1003 13:50:45.583055 4861 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-internal-api-0\" (UID: \"5c2c0f80-1867-4d5e-a3fb-76c84f2cf825\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/glance-default-internal-api-0"
Oct 03 13:50:45 crc kubenswrapper[4861]: I1003 13:50:45.583461 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName:
\"kubernetes.io/empty-dir/5c2c0f80-1867-4d5e-a3fb-76c84f2cf825-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"5c2c0f80-1867-4d5e-a3fb-76c84f2cf825\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:50:45 crc kubenswrapper[4861]: I1003 13:50:45.583985 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5c2c0f80-1867-4d5e-a3fb-76c84f2cf825-logs\") pod \"glance-default-internal-api-0\" (UID: \"5c2c0f80-1867-4d5e-a3fb-76c84f2cf825\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:50:45 crc kubenswrapper[4861]: I1003 13:50:45.589206 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5c2c0f80-1867-4d5e-a3fb-76c84f2cf825-scripts\") pod \"glance-default-internal-api-0\" (UID: \"5c2c0f80-1867-4d5e-a3fb-76c84f2cf825\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:50:45 crc kubenswrapper[4861]: I1003 13:50:45.592665 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c2c0f80-1867-4d5e-a3fb-76c84f2cf825-config-data\") pod \"glance-default-internal-api-0\" (UID: \"5c2c0f80-1867-4d5e-a3fb-76c84f2cf825\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:50:45 crc kubenswrapper[4861]: I1003 13:50:45.596570 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c2c0f80-1867-4d5e-a3fb-76c84f2cf825-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"5c2c0f80-1867-4d5e-a3fb-76c84f2cf825\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:50:45 crc kubenswrapper[4861]: I1003 13:50:45.604366 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7zx7v\" (UniqueName: \"kubernetes.io/projected/5c2c0f80-1867-4d5e-a3fb-76c84f2cf825-kube-api-access-7zx7v\") pod \"glance-default-internal-api-0\" (UID: \"5c2c0f80-1867-4d5e-a3fb-76c84f2cf825\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:50:45 crc kubenswrapper[4861]: I1003 13:50:45.612895 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-internal-api-0\" (UID: \"5c2c0f80-1867-4d5e-a3fb-76c84f2cf825\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:50:45 crc kubenswrapper[4861]: I1003 13:50:45.659383 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 03 13:50:46 crc kubenswrapper[4861]: I1003 13:50:46.610153 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 03 13:50:46 crc kubenswrapper[4861]: I1003 13:50:46.716041 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 03 13:50:48 crc kubenswrapper[4861]: I1003 13:50:48.263033 4861 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-77585f5f8c-wv5dm" podUID="658e6aab-e882-407b-bd13-1c293b326996" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.126:5353: i/o timeout" Oct 03 13:50:48 crc kubenswrapper[4861]: I1003 13:50:48.263622 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-77585f5f8c-wv5dm" Oct 03 13:50:50 crc kubenswrapper[4861]: E1003 13:50:50.396072 4861 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-horizon:current-podified" Oct 03 13:50:50 crc kubenswrapper[4861]: E1003 13:50:50.396517 4861 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:horizon-log,Image:quay.io/podified-antelope-centos9/openstack-horizon:current-podified,Command:[/bin/bash],Args:[-c tail -n+1 -F /var/log/horizon/horizon.log],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n75h54fh559h554hf5h5cbhd7h589h5dfh5d5h684h557h674hffhb5h669hb8h54fh59dh9h7dh9h54ch9dh9h658h5c9h684h8bh676h59bh569q,ValueFrom:nil,},EnvVar{Name:ENABLE_DESIGNATE,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_HEAT,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_IRONIC,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_MANILA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_OCTAVIA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_WATCHER,Value:no,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:UNPACK_THEME,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/horizon,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-7fr6d,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*48,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*true,RunAsGroup:*42400,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-6d4b7fc5ff-4blmt_openstack(6fffc9fe-810c-40f8-b484-40a8fc4ed3a6): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 03 13:50:50 crc kubenswrapper[4861]: E1003 13:50:50.398358 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with 
ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon:current-podified\\\"\"]" pod="openstack/horizon-6d4b7fc5ff-4blmt" podUID="6fffc9fe-810c-40f8-b484-40a8fc4ed3a6" Oct 03 13:50:50 crc kubenswrapper[4861]: E1003 13:50:50.425654 4861 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-horizon:current-podified" Oct 03 13:50:50 crc kubenswrapper[4861]: E1003 13:50:50.425868 4861 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:horizon-log,Image:quay.io/podified-antelope-centos9/openstack-horizon:current-podified,Command:[/bin/bash],Args:[-c tail -n+1 -F /var/log/horizon/horizon.log],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n5d4h577h588h5b5h67fh55fhd5h9dh579h65fhc4h64fh5bh5d7h67dh547h574hb4h6h667h5bch6h7ch5bfh5fch5c5h64fh5c8h6dh8ch68bhfcq,ValueFrom:nil,},EnvVar{Name:ENABLE_DESIGNATE,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_HEAT,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_IRONIC,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_MANILA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_OCTAVIA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_WATCHER,Value:no,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:UNPACK_THEME,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/horizon,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-kkxgs,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*48,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*true,RunAsGroup:*42400,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-6865665b8c-nhrvv_openstack(5c7b3e0b-c320-4a02-8140-932d108fa189): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 03 13:50:50 crc kubenswrapper[4861]: E1003 13:50:50.433356 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon:current-podified\\\"\"]" pod="openstack/horizon-6865665b8c-nhrvv" podUID="5c7b3e0b-c320-4a02-8140-932d108fa189" Oct 03 13:50:52 crc kubenswrapper[4861]: E1003 13:50:52.454497 4861 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" 
image="quay.io/podified-antelope-centos9/openstack-horizon:current-podified" Oct 03 13:50:52 crc kubenswrapper[4861]: E1003 13:50:52.454987 4861 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:horizon-log,Image:quay.io/podified-antelope-centos9/openstack-horizon:current-podified,Command:[/bin/bash],Args:[-c tail -n+1 -F /var/log/horizon/horizon.log],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n679h59ch56fhc6h58h6ch64bh554h9fh68dh58dh79h558h5b9h594h695h5ddh64dh57dh595h689h56bh66bh57dh67h88h644h675hd4h5b4h55chf8q,ValueFrom:nil,},EnvVar{Name:ENABLE_DESIGNATE,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_HEAT,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_IRONIC,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_MANILA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_OCTAVIA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_WATCHER,Value:no,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:UNPACK_THEME,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/horizon,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-7vz4w,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*48,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*true,RunAsGroup:*42400,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-549449d4bc-ffsq9_openstack(84aa45eb-6ad6-41af-91b9-b6b7b7a43790): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 03 13:50:52 crc kubenswrapper[4861]: E1003 13:50:52.458333 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon:current-podified\\\"\"]" pod="openstack/horizon-549449d4bc-ffsq9" podUID="84aa45eb-6ad6-41af-91b9-b6b7b7a43790" Oct 03 13:50:53 crc kubenswrapper[4861]: I1003 13:50:53.263932 4861 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-77585f5f8c-wv5dm" podUID="658e6aab-e882-407b-bd13-1c293b326996" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.126:5353: i/o timeout" Oct 03 13:50:54 crc kubenswrapper[4861]: E1003 13:50:54.048050 4861 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified" Oct 03 13:50:54 crc kubenswrapper[4861]: E1003 13:50:54.048254 4861 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:cinder-db-sync,Image:quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etc-machine-id,ReadOnly:true,MountPath:/etc/machine-id,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/config-data/merged,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/cinder/cinder.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-fh644,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-db-sync-ksvcg_openstack(456069ef-db45-4878-85d5-1e5001fa789e): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 03 13:50:54 crc kubenswrapper[4861]: E1003 13:50:54.049727 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/cinder-db-sync-ksvcg" podUID="456069ef-db45-4878-85d5-1e5001fa789e" Oct 03 13:50:54 crc kubenswrapper[4861]: I1003 13:50:54.116835 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-6865665b8c-nhrvv" Oct 03 13:50:54 crc kubenswrapper[4861]: I1003 13:50:54.117125 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-77585f5f8c-wv5dm" Oct 03 13:50:54 crc kubenswrapper[4861]: I1003 13:50:54.200914 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/658e6aab-e882-407b-bd13-1c293b326996-dns-svc\") pod \"658e6aab-e882-407b-bd13-1c293b326996\" (UID: \"658e6aab-e882-407b-bd13-1c293b326996\") " Oct 03 13:50:54 crc kubenswrapper[4861]: I1003 13:50:54.201026 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5c7b3e0b-c320-4a02-8140-932d108fa189-logs\") pod \"5c7b3e0b-c320-4a02-8140-932d108fa189\" (UID: \"5c7b3e0b-c320-4a02-8140-932d108fa189\") " Oct 03 13:50:54 crc kubenswrapper[4861]: I1003 13:50:54.201147 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kkxgs\" (UniqueName: \"kubernetes.io/projected/5c7b3e0b-c320-4a02-8140-932d108fa189-kube-api-access-kkxgs\") pod \"5c7b3e0b-c320-4a02-8140-932d108fa189\" (UID: \"5c7b3e0b-c320-4a02-8140-932d108fa189\") " Oct 03 13:50:54 crc kubenswrapper[4861]: I1003 13:50:54.201188 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/5c7b3e0b-c320-4a02-8140-932d108fa189-horizon-secret-key\") pod \"5c7b3e0b-c320-4a02-8140-932d108fa189\" (UID: \"5c7b3e0b-c320-4a02-8140-932d108fa189\") " Oct 03 13:50:54 crc kubenswrapper[4861]: I1003 13:50:54.201255 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p869c\" (UniqueName: \"kubernetes.io/projected/658e6aab-e882-407b-bd13-1c293b326996-kube-api-access-p869c\") pod \"658e6aab-e882-407b-bd13-1c293b326996\" (UID: \"658e6aab-e882-407b-bd13-1c293b326996\") " Oct 03 13:50:54 crc kubenswrapper[4861]: I1003 13:50:54.201274 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/658e6aab-e882-407b-bd13-1c293b326996-ovsdbserver-sb\") pod \"658e6aab-e882-407b-bd13-1c293b326996\" (UID: \"658e6aab-e882-407b-bd13-1c293b326996\") " Oct 03 13:50:54 crc kubenswrapper[4861]: I1003 13:50:54.201335 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/658e6aab-e882-407b-bd13-1c293b326996-config\") pod \"658e6aab-e882-407b-bd13-1c293b326996\" (UID: \"658e6aab-e882-407b-bd13-1c293b326996\") " Oct 03 13:50:54 crc kubenswrapper[4861]: I1003 13:50:54.201359 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/658e6aab-e882-407b-bd13-1c293b326996-dns-swift-storage-0\") pod \"658e6aab-e882-407b-bd13-1c293b326996\" (UID: \"658e6aab-e882-407b-bd13-1c293b326996\") " Oct 03 13:50:54 crc kubenswrapper[4861]: I1003 13:50:54.201400 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/658e6aab-e882-407b-bd13-1c293b326996-ovsdbserver-nb\") pod \"658e6aab-e882-407b-bd13-1c293b326996\" (UID: \"658e6aab-e882-407b-bd13-1c293b326996\") " Oct 03 13:50:54 crc kubenswrapper[4861]: I1003 13:50:54.201418 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5c7b3e0b-c320-4a02-8140-932d108fa189-scripts\") pod \"5c7b3e0b-c320-4a02-8140-932d108fa189\" (UID: 
\"5c7b3e0b-c320-4a02-8140-932d108fa189\") " Oct 03 13:50:54 crc kubenswrapper[4861]: I1003 13:50:54.201483 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5c7b3e0b-c320-4a02-8140-932d108fa189-config-data\") pod \"5c7b3e0b-c320-4a02-8140-932d108fa189\" (UID: \"5c7b3e0b-c320-4a02-8140-932d108fa189\") " Oct 03 13:50:54 crc kubenswrapper[4861]: I1003 13:50:54.201573 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5c7b3e0b-c320-4a02-8140-932d108fa189-logs" (OuterVolumeSpecName: "logs") pod "5c7b3e0b-c320-4a02-8140-932d108fa189" (UID: "5c7b3e0b-c320-4a02-8140-932d108fa189"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:50:54 crc kubenswrapper[4861]: I1003 13:50:54.202005 4861 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5c7b3e0b-c320-4a02-8140-932d108fa189-logs\") on node \"crc\" DevicePath \"\"" Oct 03 13:50:54 crc kubenswrapper[4861]: I1003 13:50:54.203148 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5c7b3e0b-c320-4a02-8140-932d108fa189-config-data" (OuterVolumeSpecName: "config-data") pod "5c7b3e0b-c320-4a02-8140-932d108fa189" (UID: "5c7b3e0b-c320-4a02-8140-932d108fa189"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:50:54 crc kubenswrapper[4861]: I1003 13:50:54.203724 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5c7b3e0b-c320-4a02-8140-932d108fa189-scripts" (OuterVolumeSpecName: "scripts") pod "5c7b3e0b-c320-4a02-8140-932d108fa189" (UID: "5c7b3e0b-c320-4a02-8140-932d108fa189"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:50:54 crc kubenswrapper[4861]: I1003 13:50:54.209913 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5c7b3e0b-c320-4a02-8140-932d108fa189-kube-api-access-kkxgs" (OuterVolumeSpecName: "kube-api-access-kkxgs") pod "5c7b3e0b-c320-4a02-8140-932d108fa189" (UID: "5c7b3e0b-c320-4a02-8140-932d108fa189"). InnerVolumeSpecName "kube-api-access-kkxgs". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:50:54 crc kubenswrapper[4861]: I1003 13:50:54.221102 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/658e6aab-e882-407b-bd13-1c293b326996-kube-api-access-p869c" (OuterVolumeSpecName: "kube-api-access-p869c") pod "658e6aab-e882-407b-bd13-1c293b326996" (UID: "658e6aab-e882-407b-bd13-1c293b326996"). InnerVolumeSpecName "kube-api-access-p869c". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:50:54 crc kubenswrapper[4861]: I1003 13:50:54.229835 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5c7b3e0b-c320-4a02-8140-932d108fa189-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "5c7b3e0b-c320-4a02-8140-932d108fa189" (UID: "5c7b3e0b-c320-4a02-8140-932d108fa189"). InnerVolumeSpecName "horizon-secret-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:50:54 crc kubenswrapper[4861]: I1003 13:50:54.251809 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/658e6aab-e882-407b-bd13-1c293b326996-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "658e6aab-e882-407b-bd13-1c293b326996" (UID: "658e6aab-e882-407b-bd13-1c293b326996"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:50:54 crc kubenswrapper[4861]: I1003 13:50:54.271643 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/658e6aab-e882-407b-bd13-1c293b326996-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "658e6aab-e882-407b-bd13-1c293b326996" (UID: "658e6aab-e882-407b-bd13-1c293b326996"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:50:54 crc kubenswrapper[4861]: I1003 13:50:54.276180 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/658e6aab-e882-407b-bd13-1c293b326996-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "658e6aab-e882-407b-bd13-1c293b326996" (UID: "658e6aab-e882-407b-bd13-1c293b326996"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:50:54 crc kubenswrapper[4861]: I1003 13:50:54.291988 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/658e6aab-e882-407b-bd13-1c293b326996-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "658e6aab-e882-407b-bd13-1c293b326996" (UID: "658e6aab-e882-407b-bd13-1c293b326996"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:50:54 crc kubenswrapper[4861]: I1003 13:50:54.304634 4861 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5c7b3e0b-c320-4a02-8140-932d108fa189-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 13:50:54 crc kubenswrapper[4861]: I1003 13:50:54.304668 4861 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/658e6aab-e882-407b-bd13-1c293b326996-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 03 13:50:54 crc kubenswrapper[4861]: I1003 13:50:54.304680 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kkxgs\" (UniqueName: \"kubernetes.io/projected/5c7b3e0b-c320-4a02-8140-932d108fa189-kube-api-access-kkxgs\") on node \"crc\" DevicePath \"\"" Oct 03 13:50:54 crc kubenswrapper[4861]: I1003 13:50:54.304692 4861 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/5c7b3e0b-c320-4a02-8140-932d108fa189-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Oct 03 13:50:54 crc kubenswrapper[4861]: I1003 13:50:54.304702 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p869c\" (UniqueName: \"kubernetes.io/projected/658e6aab-e882-407b-bd13-1c293b326996-kube-api-access-p869c\") on node \"crc\" DevicePath \"\"" Oct 03 13:50:54 crc kubenswrapper[4861]: I1003 13:50:54.304713 4861 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/658e6aab-e882-407b-bd13-1c293b326996-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 03 13:50:54 crc kubenswrapper[4861]: I1003 13:50:54.304726 4861 reconciler_common.go:293] "Volume detached for 
volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/658e6aab-e882-407b-bd13-1c293b326996-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 03 13:50:54 crc kubenswrapper[4861]: I1003 13:50:54.304737 4861 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/658e6aab-e882-407b-bd13-1c293b326996-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 03 13:50:54 crc kubenswrapper[4861]: I1003 13:50:54.304749 4861 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5c7b3e0b-c320-4a02-8140-932d108fa189-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 13:50:54 crc kubenswrapper[4861]: I1003 13:50:54.306964 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/658e6aab-e882-407b-bd13-1c293b326996-config" (OuterVolumeSpecName: "config") pod "658e6aab-e882-407b-bd13-1c293b326996" (UID: "658e6aab-e882-407b-bd13-1c293b326996"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:50:54 crc kubenswrapper[4861]: I1003 13:50:54.406110 4861 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/658e6aab-e882-407b-bd13-1c293b326996-config\") on node \"crc\" DevicePath \"\"" Oct 03 13:50:54 crc kubenswrapper[4861]: I1003 13:50:54.462037 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-6865665b8c-nhrvv" Oct 03 13:50:54 crc kubenswrapper[4861]: I1003 13:50:54.462036 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6865665b8c-nhrvv" event={"ID":"5c7b3e0b-c320-4a02-8140-932d108fa189","Type":"ContainerDied","Data":"f70fd01c4558779adb6ba8a35e3cacc7e17ce11506a8e699268bc8e3f0fd1d30"} Oct 03 13:50:54 crc kubenswrapper[4861]: I1003 13:50:54.465614 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77585f5f8c-wv5dm" event={"ID":"658e6aab-e882-407b-bd13-1c293b326996","Type":"ContainerDied","Data":"1806d26ed176086f59a48a1c74f6d18318a2648dbaa24de5feceeaa409242240"} Oct 03 13:50:54 crc kubenswrapper[4861]: I1003 13:50:54.465637 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-77585f5f8c-wv5dm" Oct 03 13:50:54 crc kubenswrapper[4861]: E1003 13:50:54.468490 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified\\\"\"" pod="openstack/cinder-db-sync-ksvcg" podUID="456069ef-db45-4878-85d5-1e5001fa789e" Oct 03 13:50:54 crc kubenswrapper[4861]: I1003 13:50:54.530905 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-77585f5f8c-wv5dm"] Oct 03 13:50:54 crc kubenswrapper[4861]: I1003 13:50:54.554324 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-77585f5f8c-wv5dm"] Oct 03 13:50:54 crc kubenswrapper[4861]: I1003 13:50:54.590350 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-6865665b8c-nhrvv"] Oct 03 13:50:54 crc kubenswrapper[4861]: I1003 13:50:54.596941 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-6865665b8c-nhrvv"] Oct 03 13:50:54 crc kubenswrapper[4861]: I1003 13:50:54.691538 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5c7b3e0b-c320-4a02-8140-932d108fa189" path="/var/lib/kubelet/pods/5c7b3e0b-c320-4a02-8140-932d108fa189/volumes" Oct 03 13:50:54 crc kubenswrapper[4861]: I1003 13:50:54.692244 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="658e6aab-e882-407b-bd13-1c293b326996" path="/var/lib/kubelet/pods/658e6aab-e882-407b-bd13-1c293b326996/volumes" Oct 03 13:50:54 crc kubenswrapper[4861]: I1003 13:50:54.753400 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-6d4b7fc5ff-4blmt" Oct 03 13:50:54 crc kubenswrapper[4861]: E1003 13:50:54.760628 4861 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified" Oct 03 13:50:54 crc kubenswrapper[4861]: E1003 13:50:54.760760 4861 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:barbican-db-sync,Image:quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified,Command:[/bin/bash],Args:[-c barbican-manage db 
upgrade],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/barbican/barbican.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-b2xpr,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42403,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42403,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod barbican-db-sync-h4wcn_openstack(3b0f621f-bdf0-4768-a764-0bc15e01faba): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 03 13:50:54 crc kubenswrapper[4861]: E1003 13:50:54.761936 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/barbican-db-sync-h4wcn" podUID="3b0f621f-bdf0-4768-a764-0bc15e01faba" Oct 03 13:50:54 crc kubenswrapper[4861]: I1003 13:50:54.811264 4861 scope.go:117] "RemoveContainer" containerID="fb0d4456c25e1544bde81cd98471695bf986955f49d4344a6205e03f8fd21e05" Oct 03 13:50:54 crc kubenswrapper[4861]: I1003 13:50:54.811929 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6fffc9fe-810c-40f8-b484-40a8fc4ed3a6-scripts\") pod \"6fffc9fe-810c-40f8-b484-40a8fc4ed3a6\" (UID: \"6fffc9fe-810c-40f8-b484-40a8fc4ed3a6\") " Oct 03 13:50:54 crc kubenswrapper[4861]: I1003 13:50:54.811981 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/6fffc9fe-810c-40f8-b484-40a8fc4ed3a6-config-data\") pod \"6fffc9fe-810c-40f8-b484-40a8fc4ed3a6\" (UID: \"6fffc9fe-810c-40f8-b484-40a8fc4ed3a6\") " Oct 03 13:50:54 crc kubenswrapper[4861]: I1003 13:50:54.812064 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6fffc9fe-810c-40f8-b484-40a8fc4ed3a6-logs\") pod \"6fffc9fe-810c-40f8-b484-40a8fc4ed3a6\" (UID: \"6fffc9fe-810c-40f8-b484-40a8fc4ed3a6\") " Oct 03 13:50:54 crc kubenswrapper[4861]: I1003 13:50:54.812145 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7fr6d\" (UniqueName: \"kubernetes.io/projected/6fffc9fe-810c-40f8-b484-40a8fc4ed3a6-kube-api-access-7fr6d\") pod \"6fffc9fe-810c-40f8-b484-40a8fc4ed3a6\" (UID: 
\"6fffc9fe-810c-40f8-b484-40a8fc4ed3a6\") " Oct 03 13:50:54 crc kubenswrapper[4861]: I1003 13:50:54.812189 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/6fffc9fe-810c-40f8-b484-40a8fc4ed3a6-horizon-secret-key\") pod \"6fffc9fe-810c-40f8-b484-40a8fc4ed3a6\" (UID: \"6fffc9fe-810c-40f8-b484-40a8fc4ed3a6\") " Oct 03 13:50:54 crc kubenswrapper[4861]: I1003 13:50:54.812546 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6fffc9fe-810c-40f8-b484-40a8fc4ed3a6-logs" (OuterVolumeSpecName: "logs") pod "6fffc9fe-810c-40f8-b484-40a8fc4ed3a6" (UID: "6fffc9fe-810c-40f8-b484-40a8fc4ed3a6"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:50:54 crc kubenswrapper[4861]: I1003 13:50:54.812597 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6fffc9fe-810c-40f8-b484-40a8fc4ed3a6-scripts" (OuterVolumeSpecName: "scripts") pod "6fffc9fe-810c-40f8-b484-40a8fc4ed3a6" (UID: "6fffc9fe-810c-40f8-b484-40a8fc4ed3a6"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:50:54 crc kubenswrapper[4861]: I1003 13:50:54.812615 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6fffc9fe-810c-40f8-b484-40a8fc4ed3a6-config-data" (OuterVolumeSpecName: "config-data") pod "6fffc9fe-810c-40f8-b484-40a8fc4ed3a6" (UID: "6fffc9fe-810c-40f8-b484-40a8fc4ed3a6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:50:54 crc kubenswrapper[4861]: I1003 13:50:54.812859 4861 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6fffc9fe-810c-40f8-b484-40a8fc4ed3a6-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 13:50:54 crc kubenswrapper[4861]: I1003 13:50:54.812875 4861 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/6fffc9fe-810c-40f8-b484-40a8fc4ed3a6-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 13:50:54 crc kubenswrapper[4861]: I1003 13:50:54.812885 4861 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6fffc9fe-810c-40f8-b484-40a8fc4ed3a6-logs\") on node \"crc\" DevicePath \"\"" Oct 03 13:50:54 crc kubenswrapper[4861]: I1003 13:50:54.815393 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6fffc9fe-810c-40f8-b484-40a8fc4ed3a6-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "6fffc9fe-810c-40f8-b484-40a8fc4ed3a6" (UID: "6fffc9fe-810c-40f8-b484-40a8fc4ed3a6"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:50:54 crc kubenswrapper[4861]: I1003 13:50:54.818485 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6fffc9fe-810c-40f8-b484-40a8fc4ed3a6-kube-api-access-7fr6d" (OuterVolumeSpecName: "kube-api-access-7fr6d") pod "6fffc9fe-810c-40f8-b484-40a8fc4ed3a6" (UID: "6fffc9fe-810c-40f8-b484-40a8fc4ed3a6"). InnerVolumeSpecName "kube-api-access-7fr6d". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:50:54 crc kubenswrapper[4861]: I1003 13:50:54.827174 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-549449d4bc-ffsq9" Oct 03 13:50:54 crc kubenswrapper[4861]: I1003 13:50:54.917385 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/84aa45eb-6ad6-41af-91b9-b6b7b7a43790-horizon-secret-key\") pod \"84aa45eb-6ad6-41af-91b9-b6b7b7a43790\" (UID: \"84aa45eb-6ad6-41af-91b9-b6b7b7a43790\") " Oct 03 13:50:54 crc kubenswrapper[4861]: I1003 13:50:54.917517 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/84aa45eb-6ad6-41af-91b9-b6b7b7a43790-logs\") pod \"84aa45eb-6ad6-41af-91b9-b6b7b7a43790\" (UID: \"84aa45eb-6ad6-41af-91b9-b6b7b7a43790\") " Oct 03 13:50:54 crc kubenswrapper[4861]: I1003 13:50:54.917555 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7vz4w\" (UniqueName: \"kubernetes.io/projected/84aa45eb-6ad6-41af-91b9-b6b7b7a43790-kube-api-access-7vz4w\") pod \"84aa45eb-6ad6-41af-91b9-b6b7b7a43790\" (UID: \"84aa45eb-6ad6-41af-91b9-b6b7b7a43790\") " Oct 03 13:50:54 crc kubenswrapper[4861]: I1003 13:50:54.917636 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/84aa45eb-6ad6-41af-91b9-b6b7b7a43790-scripts\") pod \"84aa45eb-6ad6-41af-91b9-b6b7b7a43790\" (UID: \"84aa45eb-6ad6-41af-91b9-b6b7b7a43790\") " Oct 03 13:50:54 crc kubenswrapper[4861]: I1003 13:50:54.917902 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/84aa45eb-6ad6-41af-91b9-b6b7b7a43790-config-data\") pod \"84aa45eb-6ad6-41af-91b9-b6b7b7a43790\" (UID: \"84aa45eb-6ad6-41af-91b9-b6b7b7a43790\") " Oct 03 13:50:54 crc kubenswrapper[4861]: I1003 13:50:54.919160 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7fr6d\" (UniqueName: \"kubernetes.io/projected/6fffc9fe-810c-40f8-b484-40a8fc4ed3a6-kube-api-access-7fr6d\") on node \"crc\" DevicePath \"\"" Oct 03 13:50:54 crc kubenswrapper[4861]: I1003 13:50:54.919180 4861 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/6fffc9fe-810c-40f8-b484-40a8fc4ed3a6-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Oct 03 13:50:54 crc kubenswrapper[4861]: I1003 13:50:54.920387 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/84aa45eb-6ad6-41af-91b9-b6b7b7a43790-config-data" (OuterVolumeSpecName: "config-data") pod "84aa45eb-6ad6-41af-91b9-b6b7b7a43790" (UID: "84aa45eb-6ad6-41af-91b9-b6b7b7a43790"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:50:54 crc kubenswrapper[4861]: I1003 13:50:54.920781 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/84aa45eb-6ad6-41af-91b9-b6b7b7a43790-logs" (OuterVolumeSpecName: "logs") pod "84aa45eb-6ad6-41af-91b9-b6b7b7a43790" (UID: "84aa45eb-6ad6-41af-91b9-b6b7b7a43790"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:50:54 crc kubenswrapper[4861]: I1003 13:50:54.921324 4861 scope.go:117] "RemoveContainer" containerID="9798c1ac4b36142aab68c5310ebbfe2091c1de7e63034ad81ed2b2c7031df9d5" Oct 03 13:50:54 crc kubenswrapper[4861]: I1003 13:50:54.921762 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/84aa45eb-6ad6-41af-91b9-b6b7b7a43790-scripts" (OuterVolumeSpecName: "scripts") pod "84aa45eb-6ad6-41af-91b9-b6b7b7a43790" (UID: "84aa45eb-6ad6-41af-91b9-b6b7b7a43790"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:50:54 crc kubenswrapper[4861]: I1003 13:50:54.923557 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/84aa45eb-6ad6-41af-91b9-b6b7b7a43790-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "84aa45eb-6ad6-41af-91b9-b6b7b7a43790" (UID: "84aa45eb-6ad6-41af-91b9-b6b7b7a43790"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:50:54 crc kubenswrapper[4861]: I1003 13:50:54.930186 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/84aa45eb-6ad6-41af-91b9-b6b7b7a43790-kube-api-access-7vz4w" (OuterVolumeSpecName: "kube-api-access-7vz4w") pod "84aa45eb-6ad6-41af-91b9-b6b7b7a43790" (UID: "84aa45eb-6ad6-41af-91b9-b6b7b7a43790"). InnerVolumeSpecName "kube-api-access-7vz4w". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:50:55 crc kubenswrapper[4861]: I1003 13:50:55.029938 4861 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/84aa45eb-6ad6-41af-91b9-b6b7b7a43790-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Oct 03 13:50:55 crc kubenswrapper[4861]: I1003 13:50:55.030251 4861 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/84aa45eb-6ad6-41af-91b9-b6b7b7a43790-logs\") on node \"crc\" DevicePath \"\"" Oct 03 13:50:55 crc kubenswrapper[4861]: I1003 13:50:55.030265 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7vz4w\" (UniqueName: \"kubernetes.io/projected/84aa45eb-6ad6-41af-91b9-b6b7b7a43790-kube-api-access-7vz4w\") on node \"crc\" DevicePath \"\"" Oct 03 13:50:55 crc kubenswrapper[4861]: I1003 13:50:55.030275 4861 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/84aa45eb-6ad6-41af-91b9-b6b7b7a43790-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 13:50:55 crc kubenswrapper[4861]: I1003 13:50:55.030283 4861 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/84aa45eb-6ad6-41af-91b9-b6b7b7a43790-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 13:50:55 crc kubenswrapper[4861]: I1003 13:50:55.456859 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8b5c85b87-wp2rm"] Oct 03 13:50:55 crc kubenswrapper[4861]: W1003 13:50:55.466301 4861 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod915a3de5_7d96_4d7d_83fa_96835dfdd0c1.slice/crio-7d44e785159de4f0d80772c89e4d3c643f1c9ed8b4e7316b14e2e6b994616e49 WatchSource:0}: Error finding container 7d44e785159de4f0d80772c89e4d3c643f1c9ed8b4e7316b14e2e6b994616e49: Status 404 returned error can't find the container with id 
7d44e785159de4f0d80772c89e4d3c643f1c9ed8b4e7316b14e2e6b994616e49 Oct 03 13:50:55 crc kubenswrapper[4861]: I1003 13:50:55.488127 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-549449d4bc-ffsq9" event={"ID":"84aa45eb-6ad6-41af-91b9-b6b7b7a43790","Type":"ContainerDied","Data":"f9864f56d97c8f286c6c4da517d06b8fb796168347fac0a378c780c3a20ac506"} Oct 03 13:50:55 crc kubenswrapper[4861]: I1003 13:50:55.488159 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-549449d4bc-ffsq9" Oct 03 13:50:55 crc kubenswrapper[4861]: I1003 13:50:55.497100 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3413d18c-4d35-4962-bc3f-b6750860b13d","Type":"ContainerStarted","Data":"5af7b441f7b61362145e5edc152e1d729bb66a3c297fbd1c3320b8cdd862a9f6"} Oct 03 13:50:55 crc kubenswrapper[4861]: I1003 13:50:55.507639 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-cmgtn"] Oct 03 13:50:55 crc kubenswrapper[4861]: I1003 13:50:55.508843 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6d4b7fc5ff-4blmt" event={"ID":"6fffc9fe-810c-40f8-b484-40a8fc4ed3a6","Type":"ContainerDied","Data":"e9d6c48819bc96edde4b9eaed26be773d623e5089bd28d59682a01180ea8b0f1"} Oct 03 13:50:55 crc kubenswrapper[4861]: I1003 13:50:55.508930 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-6d4b7fc5ff-4blmt" Oct 03 13:50:55 crc kubenswrapper[4861]: I1003 13:50:55.515424 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" event={"ID":"d8335d3f-417e-4114-b306-a3d8f6c31348","Type":"ContainerStarted","Data":"13a0d25a9a90da6fc94ead2cdfeed1d7dc6194708455cf7dc135deca83d68d28"} Oct 03 13:50:55 crc kubenswrapper[4861]: E1003 13:50:55.517297 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified\\\"\"" pod="openstack/barbican-db-sync-h4wcn" podUID="3b0f621f-bdf0-4768-a764-0bc15e01faba" Oct 03 13:50:55 crc kubenswrapper[4861]: I1003 13:50:55.709733 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-549449d4bc-ffsq9"] Oct 03 13:50:55 crc kubenswrapper[4861]: I1003 13:50:55.722855 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-549449d4bc-ffsq9"] Oct 03 13:50:55 crc kubenswrapper[4861]: I1003 13:50:55.743887 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 03 13:50:55 crc kubenswrapper[4861]: I1003 13:50:55.766825 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-6d4b7fc5ff-4blmt"] Oct 03 13:50:55 crc kubenswrapper[4861]: I1003 13:50:55.775838 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-6d4b7fc5ff-4blmt"] Oct 03 13:50:55 crc kubenswrapper[4861]: I1003 13:50:55.919071 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-6c8cb9d9fb-bt6ls"] Oct 03 13:50:55 crc kubenswrapper[4861]: I1003 13:50:55.936667 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-84cdb7b9dd-jhc2h"] Oct 03 13:50:56 crc kubenswrapper[4861]: I1003 13:50:56.000464 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 03 13:50:56 
crc kubenswrapper[4861]: I1003 13:50:56.537099 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-cmgtn" event={"ID":"dd37928f-4e28-4ff6-b3bf-5baa2941c432","Type":"ContainerStarted","Data":"ebc96b17736f951d05b7af82a83fb68c57336d48d0621a313b8ce7a6beefa295"} Oct 03 13:50:56 crc kubenswrapper[4861]: I1003 13:50:56.537549 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-cmgtn" event={"ID":"dd37928f-4e28-4ff6-b3bf-5baa2941c432","Type":"ContainerStarted","Data":"5323c325eb669e5021b2aa201247b05e1c3c1176a44ce2936facabe703119ac2"} Oct 03 13:50:56 crc kubenswrapper[4861]: I1003 13:50:56.541792 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6c8cb9d9fb-bt6ls" event={"ID":"81ec621b-cc30-4ab2-ae0e-bdd71629009f","Type":"ContainerStarted","Data":"08a7e7d2fe94b66bef5d58cac566ef8b5ddd5c26cf33e5e81dd0cb99b324ee9a"} Oct 03 13:50:56 crc kubenswrapper[4861]: I1003 13:50:56.547622 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-84cdb7b9dd-jhc2h" event={"ID":"c589e11a-4953-46ec-aeff-a83f6557421f","Type":"ContainerStarted","Data":"6e75197dcad9146f3d5006e8e847a34ae7cfdcff13b7ff1da0ae4e264cd7cb12"} Oct 03 13:50:56 crc kubenswrapper[4861]: I1003 13:50:56.549044 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"5c2c0f80-1867-4d5e-a3fb-76c84f2cf825","Type":"ContainerStarted","Data":"1deaef7a35150c79329f9882aa72800859be05ef31e23a040a1332fedd4018e9"} Oct 03 13:50:56 crc kubenswrapper[4861]: I1003 13:50:56.550676 4861 generic.go:334] "Generic (PLEG): container finished" podID="915a3de5-7d96-4d7d-83fa-96835dfdd0c1" containerID="974c9f9765b74a9bd19e4877ab4a7c52e552a3eb585420967bee66ec1ab25b27" exitCode=0 Oct 03 13:50:56 crc kubenswrapper[4861]: I1003 13:50:56.550739 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8b5c85b87-wp2rm" event={"ID":"915a3de5-7d96-4d7d-83fa-96835dfdd0c1","Type":"ContainerDied","Data":"974c9f9765b74a9bd19e4877ab4a7c52e552a3eb585420967bee66ec1ab25b27"} Oct 03 13:50:56 crc kubenswrapper[4861]: I1003 13:50:56.550758 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8b5c85b87-wp2rm" event={"ID":"915a3de5-7d96-4d7d-83fa-96835dfdd0c1","Type":"ContainerStarted","Data":"7d44e785159de4f0d80772c89e4d3c643f1c9ed8b4e7316b14e2e6b994616e49"} Oct 03 13:50:56 crc kubenswrapper[4861]: I1003 13:50:56.561123 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-cmgtn" podStartSLOduration=13.561099949 podStartE2EDuration="13.561099949s" podCreationTimestamp="2025-10-03 13:50:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:50:56.551715107 +0000 UTC m=+1170.549700154" watchObservedRunningTime="2025-10-03 13:50:56.561099949 +0000 UTC m=+1170.559084996" Oct 03 13:50:56 crc kubenswrapper[4861]: I1003 13:50:56.566954 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"d4fbbbc0-39de-4055-8a23-7ed84b839b41","Type":"ContainerStarted","Data":"859ee48bdc8456489cd527521bdd9d62db2af9fda97145f0a06ab8e6c034339b"} Oct 03 13:50:56 crc kubenswrapper[4861]: I1003 13:50:56.694766 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6fffc9fe-810c-40f8-b484-40a8fc4ed3a6" 
path="/var/lib/kubelet/pods/6fffc9fe-810c-40f8-b484-40a8fc4ed3a6/volumes" Oct 03 13:50:56 crc kubenswrapper[4861]: I1003 13:50:56.696198 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="84aa45eb-6ad6-41af-91b9-b6b7b7a43790" path="/var/lib/kubelet/pods/84aa45eb-6ad6-41af-91b9-b6b7b7a43790/volumes" Oct 03 13:50:57 crc kubenswrapper[4861]: I1003 13:50:57.588995 4861 generic.go:334] "Generic (PLEG): container finished" podID="a7415066-e954-4d19-9167-1a795f87add7" containerID="0c38c0749c2fee8296c898b93d4f827db7e259dfb4be403af990d096498787d6" exitCode=0 Oct 03 13:50:57 crc kubenswrapper[4861]: I1003 13:50:57.589710 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-cgf9r" event={"ID":"a7415066-e954-4d19-9167-1a795f87add7","Type":"ContainerDied","Data":"0c38c0749c2fee8296c898b93d4f827db7e259dfb4be403af990d096498787d6"} Oct 03 13:50:57 crc kubenswrapper[4861]: I1003 13:50:57.602915 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"5c2c0f80-1867-4d5e-a3fb-76c84f2cf825","Type":"ContainerStarted","Data":"df61efdecd6932feee0916640c6ca5c639153ff235ed6c113ea31407ad3d27e9"} Oct 03 13:50:57 crc kubenswrapper[4861]: I1003 13:50:57.611199 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"d4fbbbc0-39de-4055-8a23-7ed84b839b41","Type":"ContainerStarted","Data":"784b8c8784e5d1b6db6dd86484e01c57f9f54f0569758c3c5e69d09c1f2afa61"} Oct 03 13:50:58 crc kubenswrapper[4861]: I1003 13:50:58.264862 4861 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-77585f5f8c-wv5dm" podUID="658e6aab-e882-407b-bd13-1c293b326996" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.126:5353: i/o timeout" Oct 03 13:50:58 crc kubenswrapper[4861]: I1003 13:50:58.632077 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8b5c85b87-wp2rm" event={"ID":"915a3de5-7d96-4d7d-83fa-96835dfdd0c1","Type":"ContainerStarted","Data":"7fa833238a0608846caafa0e212160f4747859d6d91ed3ce86b1c6799f1ce4ea"} Oct 03 13:50:58 crc kubenswrapper[4861]: I1003 13:50:58.632336 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-8b5c85b87-wp2rm" Oct 03 13:50:58 crc kubenswrapper[4861]: I1003 13:50:58.636582 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"d4fbbbc0-39de-4055-8a23-7ed84b839b41","Type":"ContainerStarted","Data":"1b26b3b3b82e35808b25d763838e1fb3fbc91015428b0d734fff30991e8b9829"} Oct 03 13:50:58 crc kubenswrapper[4861]: I1003 13:50:58.636744 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="d4fbbbc0-39de-4055-8a23-7ed84b839b41" containerName="glance-log" containerID="cri-o://784b8c8784e5d1b6db6dd86484e01c57f9f54f0569758c3c5e69d09c1f2afa61" gracePeriod=30 Oct 03 13:50:58 crc kubenswrapper[4861]: I1003 13:50:58.636839 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="d4fbbbc0-39de-4055-8a23-7ed84b839b41" containerName="glance-httpd" containerID="cri-o://1b26b3b3b82e35808b25d763838e1fb3fbc91015428b0d734fff30991e8b9829" gracePeriod=30 Oct 03 13:50:58 crc kubenswrapper[4861]: I1003 13:50:58.643019 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"3413d18c-4d35-4962-bc3f-b6750860b13d","Type":"ContainerStarted","Data":"e050f64840966798f0046b4fbc8341a52b6af025718938a28c8d770b92dac0b8"} Oct 03 13:50:58 crc kubenswrapper[4861]: I1003 13:50:58.651597 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-f6j5h" event={"ID":"7b148d26-3aac-44de-9776-c03b03c5fff2","Type":"ContainerStarted","Data":"4c4adbc9f6f31d3c37ae6cbde5ed1a833e9d9b61cb14a154f12f609ed560cbfd"} Oct 03 13:50:58 crc kubenswrapper[4861]: I1003 13:50:58.656332 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6c8cb9d9fb-bt6ls" event={"ID":"81ec621b-cc30-4ab2-ae0e-bdd71629009f","Type":"ContainerStarted","Data":"834a5cec05a299ac4024b21688ce1b56239064614bf930e0fb726a9fa037c4fb"} Oct 03 13:50:58 crc kubenswrapper[4861]: I1003 13:50:58.656374 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6c8cb9d9fb-bt6ls" event={"ID":"81ec621b-cc30-4ab2-ae0e-bdd71629009f","Type":"ContainerStarted","Data":"f4d740acff55138d44ff154676c8dc6e7a22791c04e772d79582a021565c7c34"} Oct 03 13:50:58 crc kubenswrapper[4861]: I1003 13:50:58.657157 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-8b5c85b87-wp2rm" podStartSLOduration=15.657138052 podStartE2EDuration="15.657138052s" podCreationTimestamp="2025-10-03 13:50:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:50:58.652871248 +0000 UTC m=+1172.650856285" watchObservedRunningTime="2025-10-03 13:50:58.657138052 +0000 UTC m=+1172.655123099" Oct 03 13:50:58 crc kubenswrapper[4861]: I1003 13:50:58.664578 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-84cdb7b9dd-jhc2h" event={"ID":"c589e11a-4953-46ec-aeff-a83f6557421f","Type":"ContainerStarted","Data":"ddeaa56e9aa6c6ba89a75f8f405df5f2eebbad9b1e8dee7d0758a5aa07447be8"} Oct 03 13:50:58 crc kubenswrapper[4861]: I1003 13:50:58.664619 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-84cdb7b9dd-jhc2h" event={"ID":"c589e11a-4953-46ec-aeff-a83f6557421f","Type":"ContainerStarted","Data":"963ef2ef3f5f426b3763350ce9604beea4e5c0db8da7c36621492d44753ff880"} Oct 03 13:50:58 crc kubenswrapper[4861]: I1003 13:50:58.668868 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="5c2c0f80-1867-4d5e-a3fb-76c84f2cf825" containerName="glance-log" containerID="cri-o://df61efdecd6932feee0916640c6ca5c639153ff235ed6c113ea31407ad3d27e9" gracePeriod=30 Oct 03 13:50:58 crc kubenswrapper[4861]: I1003 13:50:58.668931 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"5c2c0f80-1867-4d5e-a3fb-76c84f2cf825","Type":"ContainerStarted","Data":"4120dcd8203d2733e62e9471f38bfceb97f60904dd990938e8742423779c0e18"} Oct 03 13:50:58 crc kubenswrapper[4861]: I1003 13:50:58.668979 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="5c2c0f80-1867-4d5e-a3fb-76c84f2cf825" containerName="glance-httpd" containerID="cri-o://4120dcd8203d2733e62e9471f38bfceb97f60904dd990938e8742423779c0e18" gracePeriod=30 Oct 03 13:50:58 crc kubenswrapper[4861]: I1003 13:50:58.707959 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=15.70794022 
podStartE2EDuration="15.70794022s" podCreationTimestamp="2025-10-03 13:50:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:50:58.698777226 +0000 UTC m=+1172.696762273" watchObservedRunningTime="2025-10-03 13:50:58.70794022 +0000 UTC m=+1172.705925267" Oct 03 13:50:58 crc kubenswrapper[4861]: I1003 13:50:58.712388 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-f6j5h" podStartSLOduration=3.85402849 podStartE2EDuration="39.712366708s" podCreationTimestamp="2025-10-03 13:50:19 +0000 UTC" firstStartedPulling="2025-10-03 13:50:21.714400549 +0000 UTC m=+1135.712385596" lastFinishedPulling="2025-10-03 13:50:57.572738767 +0000 UTC m=+1171.570723814" observedRunningTime="2025-10-03 13:50:58.6764985 +0000 UTC m=+1172.674483547" watchObservedRunningTime="2025-10-03 13:50:58.712366708 +0000 UTC m=+1172.710351755" Oct 03 13:50:58 crc kubenswrapper[4861]: I1003 13:50:58.720398 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-6c8cb9d9fb-bt6ls" Oct 03 13:50:58 crc kubenswrapper[4861]: I1003 13:50:58.720445 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-6c8cb9d9fb-bt6ls" Oct 03 13:50:58 crc kubenswrapper[4861]: I1003 13:50:58.765298 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=14.765280233 podStartE2EDuration="14.765280233s" podCreationTimestamp="2025-10-03 13:50:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:50:58.763606789 +0000 UTC m=+1172.761591836" watchObservedRunningTime="2025-10-03 13:50:58.765280233 +0000 UTC m=+1172.763265280" Oct 03 13:50:58 crc kubenswrapper[4861]: I1003 13:50:58.768796 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-84cdb7b9dd-jhc2h" podStartSLOduration=29.517071108 podStartE2EDuration="30.768775977s" podCreationTimestamp="2025-10-03 13:50:28 +0000 UTC" firstStartedPulling="2025-10-03 13:50:55.960072448 +0000 UTC m=+1169.958057495" lastFinishedPulling="2025-10-03 13:50:57.211777317 +0000 UTC m=+1171.209762364" observedRunningTime="2025-10-03 13:50:58.7318839 +0000 UTC m=+1172.729868967" watchObservedRunningTime="2025-10-03 13:50:58.768775977 +0000 UTC m=+1172.766761014" Oct 03 13:50:58 crc kubenswrapper[4861]: I1003 13:50:58.800289 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-6c8cb9d9fb-bt6ls" podStartSLOduration=29.577863514 podStartE2EDuration="30.800271439s" podCreationTimestamp="2025-10-03 13:50:28 +0000 UTC" firstStartedPulling="2025-10-03 13:50:55.960189411 +0000 UTC m=+1169.958174458" lastFinishedPulling="2025-10-03 13:50:57.182597336 +0000 UTC m=+1171.180582383" observedRunningTime="2025-10-03 13:50:58.798517852 +0000 UTC m=+1172.796502899" watchObservedRunningTime="2025-10-03 13:50:58.800271439 +0000 UTC m=+1172.798256486" Oct 03 13:50:59 crc kubenswrapper[4861]: I1003 13:50:59.132817 4861 util.go:48] "No ready sandbox for pod can be found. 
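The pod_startup_latency_tracker lines carry two durations: podStartE2EDuration (observedRunningTime minus podCreationTimestamp) and podStartSLOduration, which is smaller whenever an image pull happened because pull time is excluded. A quick arithmetic check using only numbers copied from the placement-db-sync-f6j5h line above:

# Numbers copied from the pod_startup_latency_tracker line for placement-db-sync-f6j5h.
e2e = 39.712366708                      # podStartE2EDuration
pull = 57.572738767 - 21.714400549      # lastFinishedPulling - firstStartedPulling (same minute)
slo = e2e - pull                        # SLO duration excludes image-pull time
print(round(pull, 9), round(slo, 9))    # 35.858338218 3.85402849 == podStartSLOduration

For pods whose images were already present, firstStartedPulling/lastFinishedPulling stay at the zero timestamp and the two durations coincide, as in the glance and dnsmasq lines above.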
Oct 03 13:50:59 crc kubenswrapper[4861]: I1003 13:50:59.132817 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-cgf9r"
Oct 03 13:50:59 crc kubenswrapper[4861]: I1003 13:50:59.231816 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/a7415066-e954-4d19-9167-1a795f87add7-config\") pod \"a7415066-e954-4d19-9167-1a795f87add7\" (UID: \"a7415066-e954-4d19-9167-1a795f87add7\") "
Oct 03 13:50:59 crc kubenswrapper[4861]: I1003 13:50:59.231959 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4qhrl\" (UniqueName: \"kubernetes.io/projected/a7415066-e954-4d19-9167-1a795f87add7-kube-api-access-4qhrl\") pod \"a7415066-e954-4d19-9167-1a795f87add7\" (UID: \"a7415066-e954-4d19-9167-1a795f87add7\") "
Oct 03 13:50:59 crc kubenswrapper[4861]: I1003 13:50:59.232014 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7415066-e954-4d19-9167-1a795f87add7-combined-ca-bundle\") pod \"a7415066-e954-4d19-9167-1a795f87add7\" (UID: \"a7415066-e954-4d19-9167-1a795f87add7\") "
Oct 03 13:50:59 crc kubenswrapper[4861]: I1003 13:50:59.251673 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a7415066-e954-4d19-9167-1a795f87add7-kube-api-access-4qhrl" (OuterVolumeSpecName: "kube-api-access-4qhrl") pod "a7415066-e954-4d19-9167-1a795f87add7" (UID: "a7415066-e954-4d19-9167-1a795f87add7"). InnerVolumeSpecName "kube-api-access-4qhrl". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 13:50:59 crc kubenswrapper[4861]: I1003 13:50:59.338087 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4qhrl\" (UniqueName: \"kubernetes.io/projected/a7415066-e954-4d19-9167-1a795f87add7-kube-api-access-4qhrl\") on node \"crc\" DevicePath \"\""
Oct 03 13:50:59 crc kubenswrapper[4861]: I1003 13:50:59.362591 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a7415066-e954-4d19-9167-1a795f87add7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a7415066-e954-4d19-9167-1a795f87add7" (UID: "a7415066-e954-4d19-9167-1a795f87add7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 13:50:59 crc kubenswrapper[4861]: I1003 13:50:59.370305 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a7415066-e954-4d19-9167-1a795f87add7-config" (OuterVolumeSpecName: "config") pod "a7415066-e954-4d19-9167-1a795f87add7" (UID: "a7415066-e954-4d19-9167-1a795f87add7"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 13:50:59 crc kubenswrapper[4861]: I1003 13:50:59.439994 4861 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/a7415066-e954-4d19-9167-1a795f87add7-config\") on node \"crc\" DevicePath \"\""
Oct 03 13:50:59 crc kubenswrapper[4861]: I1003 13:50:59.440325 4861 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7415066-e954-4d19-9167-1a795f87add7-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 03 13:50:59 crc kubenswrapper[4861]: I1003 13:50:59.728497 4861 generic.go:334] "Generic (PLEG): container finished" podID="5c2c0f80-1867-4d5e-a3fb-76c84f2cf825" containerID="4120dcd8203d2733e62e9471f38bfceb97f60904dd990938e8742423779c0e18" exitCode=0
Oct 03 13:50:59 crc kubenswrapper[4861]: I1003 13:50:59.728528 4861 generic.go:334] "Generic (PLEG): container finished" podID="5c2c0f80-1867-4d5e-a3fb-76c84f2cf825" containerID="df61efdecd6932feee0916640c6ca5c639153ff235ed6c113ea31407ad3d27e9" exitCode=143
Oct 03 13:50:59 crc kubenswrapper[4861]: I1003 13:50:59.728566 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"5c2c0f80-1867-4d5e-a3fb-76c84f2cf825","Type":"ContainerDied","Data":"4120dcd8203d2733e62e9471f38bfceb97f60904dd990938e8742423779c0e18"}
Oct 03 13:50:59 crc kubenswrapper[4861]: I1003 13:50:59.728592 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"5c2c0f80-1867-4d5e-a3fb-76c84f2cf825","Type":"ContainerDied","Data":"df61efdecd6932feee0916640c6ca5c639153ff235ed6c113ea31407ad3d27e9"}
Oct 03 13:50:59 crc kubenswrapper[4861]: I1003 13:50:59.750181 4861 generic.go:334] "Generic (PLEG): container finished" podID="d4fbbbc0-39de-4055-8a23-7ed84b839b41" containerID="1b26b3b3b82e35808b25d763838e1fb3fbc91015428b0d734fff30991e8b9829" exitCode=0
Oct 03 13:50:59 crc kubenswrapper[4861]: I1003 13:50:59.750220 4861 generic.go:334] "Generic (PLEG): container finished" podID="d4fbbbc0-39de-4055-8a23-7ed84b839b41" containerID="784b8c8784e5d1b6db6dd86484e01c57f9f54f0569758c3c5e69d09c1f2afa61" exitCode=143
Oct 03 13:50:59 crc kubenswrapper[4861]: I1003 13:50:59.750372 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"d4fbbbc0-39de-4055-8a23-7ed84b839b41","Type":"ContainerDied","Data":"1b26b3b3b82e35808b25d763838e1fb3fbc91015428b0d734fff30991e8b9829"}
Oct 03 13:50:59 crc kubenswrapper[4861]: I1003 13:50:59.750403 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"d4fbbbc0-39de-4055-8a23-7ed84b839b41","Type":"ContainerDied","Data":"784b8c8784e5d1b6db6dd86484e01c57f9f54f0569758c3c5e69d09c1f2afa61"}
Oct 03 13:50:59 crc kubenswrapper[4861]: I1003 13:50:59.772033 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-cgf9r" event={"ID":"a7415066-e954-4d19-9167-1a795f87add7","Type":"ContainerDied","Data":"04e50fa17210993866ed1f818fa477e85c4c2ad2038c6447f6d1c9a94cc2a4ba"}
Oct 03 13:50:59 crc kubenswrapper[4861]: I1003 13:50:59.772076 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-cgf9r"
Oct 03 13:50:59 crc kubenswrapper[4861]: I1003 13:50:59.772091 4861 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="04e50fa17210993866ed1f818fa477e85c4c2ad2038c6447f6d1c9a94cc2a4ba"
Oct 03 13:50:59 crc kubenswrapper[4861]: I1003 13:50:59.825104 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8b5c85b87-wp2rm"]
Oct 03 13:50:59 crc kubenswrapper[4861]: I1003 13:50:59.900548 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Oct 03 13:50:59 crc kubenswrapper[4861]: I1003 13:50:59.941394 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-84b966f6c9-hxkbs"]
Oct 03 13:50:59 crc kubenswrapper[4861]: E1003 13:50:59.941888 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4fbbbc0-39de-4055-8a23-7ed84b839b41" containerName="glance-httpd"
Oct 03 13:50:59 crc kubenswrapper[4861]: I1003 13:50:59.941914 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4fbbbc0-39de-4055-8a23-7ed84b839b41" containerName="glance-httpd"
Oct 03 13:50:59 crc kubenswrapper[4861]: E1003 13:50:59.941930 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="658e6aab-e882-407b-bd13-1c293b326996" containerName="init"
Oct 03 13:50:59 crc kubenswrapper[4861]: I1003 13:50:59.941938 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="658e6aab-e882-407b-bd13-1c293b326996" containerName="init"
Oct 03 13:50:59 crc kubenswrapper[4861]: E1003 13:50:59.941957 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4fbbbc0-39de-4055-8a23-7ed84b839b41" containerName="glance-log"
Oct 03 13:50:59 crc kubenswrapper[4861]: I1003 13:50:59.941966 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4fbbbc0-39de-4055-8a23-7ed84b839b41" containerName="glance-log"
Oct 03 13:50:59 crc kubenswrapper[4861]: E1003 13:50:59.941996 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a7415066-e954-4d19-9167-1a795f87add7" containerName="neutron-db-sync"
Oct 03 13:50:59 crc kubenswrapper[4861]: I1003 13:50:59.942003 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="a7415066-e954-4d19-9167-1a795f87add7" containerName="neutron-db-sync"
Oct 03 13:50:59 crc kubenswrapper[4861]: E1003 13:50:59.942022 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="658e6aab-e882-407b-bd13-1c293b326996" containerName="dnsmasq-dns"
Oct 03 13:50:59 crc kubenswrapper[4861]: I1003 13:50:59.942030 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="658e6aab-e882-407b-bd13-1c293b326996" containerName="dnsmasq-dns"
Oct 03 13:50:59 crc kubenswrapper[4861]: I1003 13:50:59.942254 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="a7415066-e954-4d19-9167-1a795f87add7" containerName="neutron-db-sync"
Oct 03 13:50:59 crc kubenswrapper[4861]: I1003 13:50:59.942282 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="d4fbbbc0-39de-4055-8a23-7ed84b839b41" containerName="glance-log"
Oct 03 13:50:59 crc kubenswrapper[4861]: I1003 13:50:59.942294 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="658e6aab-e882-407b-bd13-1c293b326996" containerName="dnsmasq-dns"
Oct 03 13:50:59 crc kubenswrapper[4861]: I1003 13:50:59.942308 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="d4fbbbc0-39de-4055-8a23-7ed84b839b41" containerName="glance-httpd"
Oct 03 13:50:59 crc kubenswrapper[4861]: I1003 13:50:59.943495 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-84b966f6c9-hxkbs"
Oct 03 13:50:59 crc kubenswrapper[4861]: I1003 13:50:59.963827 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4fbbbc0-39de-4055-8a23-7ed84b839b41-combined-ca-bundle\") pod \"d4fbbbc0-39de-4055-8a23-7ed84b839b41\" (UID: \"d4fbbbc0-39de-4055-8a23-7ed84b839b41\") "
Oct 03 13:50:59 crc kubenswrapper[4861]: I1003 13:50:59.963901 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d4fbbbc0-39de-4055-8a23-7ed84b839b41-scripts\") pod \"d4fbbbc0-39de-4055-8a23-7ed84b839b41\" (UID: \"d4fbbbc0-39de-4055-8a23-7ed84b839b41\") "
Oct 03 13:50:59 crc kubenswrapper[4861]: I1003 13:50:59.963928 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d4fbbbc0-39de-4055-8a23-7ed84b839b41-httpd-run\") pod \"d4fbbbc0-39de-4055-8a23-7ed84b839b41\" (UID: \"d4fbbbc0-39de-4055-8a23-7ed84b839b41\") "
Oct 03 13:50:59 crc kubenswrapper[4861]: I1003 13:50:59.964061 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"d4fbbbc0-39de-4055-8a23-7ed84b839b41\" (UID: \"d4fbbbc0-39de-4055-8a23-7ed84b839b41\") "
Oct 03 13:50:59 crc kubenswrapper[4861]: I1003 13:50:59.964131 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d4fbbbc0-39de-4055-8a23-7ed84b839b41-logs\") pod \"d4fbbbc0-39de-4055-8a23-7ed84b839b41\" (UID: \"d4fbbbc0-39de-4055-8a23-7ed84b839b41\") "
Oct 03 13:50:59 crc kubenswrapper[4861]: I1003 13:50:59.964147 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f6c4z\" (UniqueName: \"kubernetes.io/projected/d4fbbbc0-39de-4055-8a23-7ed84b839b41-kube-api-access-f6c4z\") pod \"d4fbbbc0-39de-4055-8a23-7ed84b839b41\" (UID: \"d4fbbbc0-39de-4055-8a23-7ed84b839b41\") "
Oct 03 13:50:59 crc kubenswrapper[4861]: I1003 13:50:59.964166 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d4fbbbc0-39de-4055-8a23-7ed84b839b41-config-data\") pod \"d4fbbbc0-39de-4055-8a23-7ed84b839b41\" (UID: \"d4fbbbc0-39de-4055-8a23-7ed84b839b41\") "
Oct 03 13:50:59 crc kubenswrapper[4861]: I1003 13:50:59.964387 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/72839de1-20d4-42dd-b913-3a8cbfffa95d-config\") pod \"dnsmasq-dns-84b966f6c9-hxkbs\" (UID: \"72839de1-20d4-42dd-b913-3a8cbfffa95d\") " pod="openstack/dnsmasq-dns-84b966f6c9-hxkbs"
Oct 03 13:50:59 crc kubenswrapper[4861]: I1003 13:50:59.964414 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wtvhl\" (UniqueName: \"kubernetes.io/projected/72839de1-20d4-42dd-b913-3a8cbfffa95d-kube-api-access-wtvhl\") pod \"dnsmasq-dns-84b966f6c9-hxkbs\" (UID: \"72839de1-20d4-42dd-b913-3a8cbfffa95d\") " pod="openstack/dnsmasq-dns-84b966f6c9-hxkbs"
Oct 03 13:50:59 crc kubenswrapper[4861]: I1003 13:50:59.964458 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/72839de1-20d4-42dd-b913-3a8cbfffa95d-dns-swift-storage-0\") pod \"dnsmasq-dns-84b966f6c9-hxkbs\" (UID: \"72839de1-20d4-42dd-b913-3a8cbfffa95d\") " pod="openstack/dnsmasq-dns-84b966f6c9-hxkbs"
Oct 03 13:50:59 crc kubenswrapper[4861]: I1003 13:50:59.964511 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/72839de1-20d4-42dd-b913-3a8cbfffa95d-dns-svc\") pod \"dnsmasq-dns-84b966f6c9-hxkbs\" (UID: \"72839de1-20d4-42dd-b913-3a8cbfffa95d\") " pod="openstack/dnsmasq-dns-84b966f6c9-hxkbs"
Oct 03 13:50:59 crc kubenswrapper[4861]: I1003 13:50:59.964526 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/72839de1-20d4-42dd-b913-3a8cbfffa95d-ovsdbserver-nb\") pod \"dnsmasq-dns-84b966f6c9-hxkbs\" (UID: \"72839de1-20d4-42dd-b913-3a8cbfffa95d\") " pod="openstack/dnsmasq-dns-84b966f6c9-hxkbs"
Oct 03 13:50:59 crc kubenswrapper[4861]: I1003 13:50:59.964565 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/72839de1-20d4-42dd-b913-3a8cbfffa95d-ovsdbserver-sb\") pod \"dnsmasq-dns-84b966f6c9-hxkbs\" (UID: \"72839de1-20d4-42dd-b913-3a8cbfffa95d\") " pod="openstack/dnsmasq-dns-84b966f6c9-hxkbs"
Oct 03 13:50:59 crc kubenswrapper[4861]: I1003 13:50:59.966567 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d4fbbbc0-39de-4055-8a23-7ed84b839b41-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "d4fbbbc0-39de-4055-8a23-7ed84b839b41" (UID: "d4fbbbc0-39de-4055-8a23-7ed84b839b41"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 03 13:50:59 crc kubenswrapper[4861]: I1003 13:50:59.966746 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d4fbbbc0-39de-4055-8a23-7ed84b839b41-logs" (OuterVolumeSpecName: "logs") pod "d4fbbbc0-39de-4055-8a23-7ed84b839b41" (UID: "d4fbbbc0-39de-4055-8a23-7ed84b839b41"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 03 13:51:00 crc kubenswrapper[4861]: I1003 13:51:00.017891 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d4fbbbc0-39de-4055-8a23-7ed84b839b41-kube-api-access-f6c4z" (OuterVolumeSpecName: "kube-api-access-f6c4z") pod "d4fbbbc0-39de-4055-8a23-7ed84b839b41" (UID: "d4fbbbc0-39de-4055-8a23-7ed84b839b41"). InnerVolumeSpecName "kube-api-access-f6c4z". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 13:51:00 crc kubenswrapper[4861]: I1003 13:51:00.024920 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d4fbbbc0-39de-4055-8a23-7ed84b839b41-scripts" (OuterVolumeSpecName: "scripts") pod "d4fbbbc0-39de-4055-8a23-7ed84b839b41" (UID: "d4fbbbc0-39de-4055-8a23-7ed84b839b41"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 03 13:51:00 crc kubenswrapper[4861]: I1003 13:51:00.044699 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-84b966f6c9-hxkbs"] Oct 03 13:51:00 crc kubenswrapper[4861]: I1003 13:51:00.053436 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d4fbbbc0-39de-4055-8a23-7ed84b839b41-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d4fbbbc0-39de-4055-8a23-7ed84b839b41" (UID: "d4fbbbc0-39de-4055-8a23-7ed84b839b41"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:51:00 crc kubenswrapper[4861]: I1003 13:51:00.067672 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/72839de1-20d4-42dd-b913-3a8cbfffa95d-dns-swift-storage-0\") pod \"dnsmasq-dns-84b966f6c9-hxkbs\" (UID: \"72839de1-20d4-42dd-b913-3a8cbfffa95d\") " pod="openstack/dnsmasq-dns-84b966f6c9-hxkbs" Oct 03 13:51:00 crc kubenswrapper[4861]: I1003 13:51:00.067783 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/72839de1-20d4-42dd-b913-3a8cbfffa95d-ovsdbserver-nb\") pod \"dnsmasq-dns-84b966f6c9-hxkbs\" (UID: \"72839de1-20d4-42dd-b913-3a8cbfffa95d\") " pod="openstack/dnsmasq-dns-84b966f6c9-hxkbs" Oct 03 13:51:00 crc kubenswrapper[4861]: I1003 13:51:00.067806 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/72839de1-20d4-42dd-b913-3a8cbfffa95d-dns-svc\") pod \"dnsmasq-dns-84b966f6c9-hxkbs\" (UID: \"72839de1-20d4-42dd-b913-3a8cbfffa95d\") " pod="openstack/dnsmasq-dns-84b966f6c9-hxkbs" Oct 03 13:51:00 crc kubenswrapper[4861]: I1003 13:51:00.067858 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/72839de1-20d4-42dd-b913-3a8cbfffa95d-ovsdbserver-sb\") pod \"dnsmasq-dns-84b966f6c9-hxkbs\" (UID: \"72839de1-20d4-42dd-b913-3a8cbfffa95d\") " pod="openstack/dnsmasq-dns-84b966f6c9-hxkbs" Oct 03 13:51:00 crc kubenswrapper[4861]: I1003 13:51:00.067939 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/72839de1-20d4-42dd-b913-3a8cbfffa95d-config\") pod \"dnsmasq-dns-84b966f6c9-hxkbs\" (UID: \"72839de1-20d4-42dd-b913-3a8cbfffa95d\") " pod="openstack/dnsmasq-dns-84b966f6c9-hxkbs" Oct 03 13:51:00 crc kubenswrapper[4861]: I1003 13:51:00.067971 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wtvhl\" (UniqueName: \"kubernetes.io/projected/72839de1-20d4-42dd-b913-3a8cbfffa95d-kube-api-access-wtvhl\") pod \"dnsmasq-dns-84b966f6c9-hxkbs\" (UID: \"72839de1-20d4-42dd-b913-3a8cbfffa95d\") " pod="openstack/dnsmasq-dns-84b966f6c9-hxkbs" Oct 03 13:51:00 crc kubenswrapper[4861]: I1003 13:51:00.068033 4861 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4fbbbc0-39de-4055-8a23-7ed84b839b41-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 13:51:00 crc kubenswrapper[4861]: I1003 13:51:00.068046 4861 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d4fbbbc0-39de-4055-8a23-7ed84b839b41-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 13:51:00 crc 
kubenswrapper[4861]: I1003 13:51:00.068057 4861 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d4fbbbc0-39de-4055-8a23-7ed84b839b41-httpd-run\") on node \"crc\" DevicePath \"\"" Oct 03 13:51:00 crc kubenswrapper[4861]: I1003 13:51:00.068080 4861 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" " Oct 03 13:51:00 crc kubenswrapper[4861]: I1003 13:51:00.068095 4861 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d4fbbbc0-39de-4055-8a23-7ed84b839b41-logs\") on node \"crc\" DevicePath \"\"" Oct 03 13:51:00 crc kubenswrapper[4861]: I1003 13:51:00.068108 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f6c4z\" (UniqueName: \"kubernetes.io/projected/d4fbbbc0-39de-4055-8a23-7ed84b839b41-kube-api-access-f6c4z\") on node \"crc\" DevicePath \"\"" Oct 03 13:51:00 crc kubenswrapper[4861]: I1003 13:51:00.071208 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/72839de1-20d4-42dd-b913-3a8cbfffa95d-config\") pod \"dnsmasq-dns-84b966f6c9-hxkbs\" (UID: \"72839de1-20d4-42dd-b913-3a8cbfffa95d\") " pod="openstack/dnsmasq-dns-84b966f6c9-hxkbs" Oct 03 13:51:00 crc kubenswrapper[4861]: I1003 13:51:00.072019 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/72839de1-20d4-42dd-b913-3a8cbfffa95d-dns-svc\") pod \"dnsmasq-dns-84b966f6c9-hxkbs\" (UID: \"72839de1-20d4-42dd-b913-3a8cbfffa95d\") " pod="openstack/dnsmasq-dns-84b966f6c9-hxkbs" Oct 03 13:51:00 crc kubenswrapper[4861]: I1003 13:51:00.072553 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/72839de1-20d4-42dd-b913-3a8cbfffa95d-dns-swift-storage-0\") pod \"dnsmasq-dns-84b966f6c9-hxkbs\" (UID: \"72839de1-20d4-42dd-b913-3a8cbfffa95d\") " pod="openstack/dnsmasq-dns-84b966f6c9-hxkbs" Oct 03 13:51:00 crc kubenswrapper[4861]: I1003 13:51:00.072577 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/72839de1-20d4-42dd-b913-3a8cbfffa95d-ovsdbserver-nb\") pod \"dnsmasq-dns-84b966f6c9-hxkbs\" (UID: \"72839de1-20d4-42dd-b913-3a8cbfffa95d\") " pod="openstack/dnsmasq-dns-84b966f6c9-hxkbs" Oct 03 13:51:00 crc kubenswrapper[4861]: I1003 13:51:00.073390 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/72839de1-20d4-42dd-b913-3a8cbfffa95d-ovsdbserver-sb\") pod \"dnsmasq-dns-84b966f6c9-hxkbs\" (UID: \"72839de1-20d4-42dd-b913-3a8cbfffa95d\") " pod="openstack/dnsmasq-dns-84b966f6c9-hxkbs" Oct 03 13:51:00 crc kubenswrapper[4861]: I1003 13:51:00.174146 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-7c8f94f9d8-cqjv4"] Oct 03 13:51:00 crc kubenswrapper[4861]: I1003 13:51:00.175876 4861 util.go:30] "No sandbox for pod can be found. 
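The reconciler_common/operation_generator lines trace each volume through UnmountVolume started, UnmountVolume.TearDown succeeded, and Volume detached on teardown, and MountVolume started followed by MountVolume.SetUp succeeded on the attach side. A small filter that groups those transitions per volume for one pod UID, a sketch assuming the exact phase strings and escaped \"name\" quoting seen above (TearDown lines key on the full plugin path rather than the short name, so they group separately):

import re
import sys
from collections import defaultdict

PHASES = [
    "UnmountVolume started", "UnmountVolume.TearDown succeeded",
    "Volume detached", "MountVolume started", "MountVolume.SetUp succeeded",
]
VOL = re.compile(r'volume \\?"(?P<name>[^"\\]+)\\?"')

def trace(lines, pod_uid):
    history = defaultdict(list)
    for line in lines:
        if pod_uid not in line:
            continue
        m = VOL.search(line)
        if not m:
            continue
        for phase in PHASES:
            if phase in line:
                history[m.group("name")].append(phase)
    return history

if __name__ == "__main__":
    # e.g.: python3 volume_trace.py d4fbbbc0-39de-4055-8a23-7ed84b839b41 < kubelet.log
    for vol, phases in trace(sys.stdin, sys.argv[1]).items():
        print(vol, "->", " / ".join(phases))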
Oct 03 13:51:00 crc kubenswrapper[4861]: I1003 13:51:00.175876 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-7c8f94f9d8-cqjv4"
Oct 03 13:51:00 crc kubenswrapper[4861]: I1003 13:51:00.185047 4861 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage02-crc" (UniqueName: "kubernetes.io/local-volume/local-storage02-crc") on node "crc"
Oct 03 13:51:00 crc kubenswrapper[4861]: I1003 13:51:00.185128 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config"
Oct 03 13:51:00 crc kubenswrapper[4861]: I1003 13:51:00.185330 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config"
Oct 03 13:51:00 crc kubenswrapper[4861]: I1003 13:51:00.185484 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-kl4jt"
Oct 03 13:51:00 crc kubenswrapper[4861]: I1003 13:51:00.185587 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-ovndbs"
Oct 03 13:51:00 crc kubenswrapper[4861]: I1003 13:51:00.206832 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wtvhl\" (UniqueName: \"kubernetes.io/projected/72839de1-20d4-42dd-b913-3a8cbfffa95d-kube-api-access-wtvhl\") pod \"dnsmasq-dns-84b966f6c9-hxkbs\" (UID: \"72839de1-20d4-42dd-b913-3a8cbfffa95d\") " pod="openstack/dnsmasq-dns-84b966f6c9-hxkbs"
Oct 03 13:51:00 crc kubenswrapper[4861]: I1003 13:51:00.233373 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-7c8f94f9d8-cqjv4"]
Oct 03 13:51:00 crc kubenswrapper[4861]: I1003 13:51:00.277613 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/3cff73d0-7e13-497b-9c06-af6e078031c5-httpd-config\") pod \"neutron-7c8f94f9d8-cqjv4\" (UID: \"3cff73d0-7e13-497b-9c06-af6e078031c5\") " pod="openstack/neutron-7c8f94f9d8-cqjv4"
Oct 03 13:51:00 crc kubenswrapper[4861]: I1003 13:51:00.277692 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/3cff73d0-7e13-497b-9c06-af6e078031c5-config\") pod \"neutron-7c8f94f9d8-cqjv4\" (UID: \"3cff73d0-7e13-497b-9c06-af6e078031c5\") " pod="openstack/neutron-7c8f94f9d8-cqjv4"
Oct 03 13:51:00 crc kubenswrapper[4861]: I1003 13:51:00.277752 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3cff73d0-7e13-497b-9c06-af6e078031c5-combined-ca-bundle\") pod \"neutron-7c8f94f9d8-cqjv4\" (UID: \"3cff73d0-7e13-497b-9c06-af6e078031c5\") " pod="openstack/neutron-7c8f94f9d8-cqjv4"
Oct 03 13:51:00 crc kubenswrapper[4861]: I1003 13:51:00.277783 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dv2wc\" (UniqueName: \"kubernetes.io/projected/3cff73d0-7e13-497b-9c06-af6e078031c5-kube-api-access-dv2wc\") pod \"neutron-7c8f94f9d8-cqjv4\" (UID: \"3cff73d0-7e13-497b-9c06-af6e078031c5\") " pod="openstack/neutron-7c8f94f9d8-cqjv4"
Oct 03 13:51:00 crc kubenswrapper[4861]: I1003 13:51:00.277833 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/3cff73d0-7e13-497b-9c06-af6e078031c5-ovndb-tls-certs\") pod \"neutron-7c8f94f9d8-cqjv4\" (UID: \"3cff73d0-7e13-497b-9c06-af6e078031c5\") " pod="openstack/neutron-7c8f94f9d8-cqjv4"
Oct 03 13:51:00 crc kubenswrapper[4861]: I1003 13:51:00.277915 4861 reconciler_common.go:293] "Volume detached for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" DevicePath \"\""
Oct 03 13:51:00 crc kubenswrapper[4861]: I1003 13:51:00.334738 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-84b966f6c9-hxkbs"
Oct 03 13:51:00 crc kubenswrapper[4861]: I1003 13:51:00.354426 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d4fbbbc0-39de-4055-8a23-7ed84b839b41-config-data" (OuterVolumeSpecName: "config-data") pod "d4fbbbc0-39de-4055-8a23-7ed84b839b41" (UID: "d4fbbbc0-39de-4055-8a23-7ed84b839b41"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 13:51:00 crc kubenswrapper[4861]: I1003 13:51:00.381355 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/3cff73d0-7e13-497b-9c06-af6e078031c5-httpd-config\") pod \"neutron-7c8f94f9d8-cqjv4\" (UID: \"3cff73d0-7e13-497b-9c06-af6e078031c5\") " pod="openstack/neutron-7c8f94f9d8-cqjv4"
Oct 03 13:51:00 crc kubenswrapper[4861]: I1003 13:51:00.381417 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/3cff73d0-7e13-497b-9c06-af6e078031c5-config\") pod \"neutron-7c8f94f9d8-cqjv4\" (UID: \"3cff73d0-7e13-497b-9c06-af6e078031c5\") " pod="openstack/neutron-7c8f94f9d8-cqjv4"
Oct 03 13:51:00 crc kubenswrapper[4861]: I1003 13:51:00.381474 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3cff73d0-7e13-497b-9c06-af6e078031c5-combined-ca-bundle\") pod \"neutron-7c8f94f9d8-cqjv4\" (UID: \"3cff73d0-7e13-497b-9c06-af6e078031c5\") " pod="openstack/neutron-7c8f94f9d8-cqjv4"
Oct 03 13:51:00 crc kubenswrapper[4861]: I1003 13:51:00.381507 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dv2wc\" (UniqueName: \"kubernetes.io/projected/3cff73d0-7e13-497b-9c06-af6e078031c5-kube-api-access-dv2wc\") pod \"neutron-7c8f94f9d8-cqjv4\" (UID: \"3cff73d0-7e13-497b-9c06-af6e078031c5\") " pod="openstack/neutron-7c8f94f9d8-cqjv4"
Oct 03 13:51:00 crc kubenswrapper[4861]: I1003 13:51:00.381556 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/3cff73d0-7e13-497b-9c06-af6e078031c5-ovndb-tls-certs\") pod \"neutron-7c8f94f9d8-cqjv4\" (UID: \"3cff73d0-7e13-497b-9c06-af6e078031c5\") " pod="openstack/neutron-7c8f94f9d8-cqjv4"
Oct 03 13:51:00 crc kubenswrapper[4861]: I1003 13:51:00.382174 4861 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d4fbbbc0-39de-4055-8a23-7ed84b839b41-config-data\") on node \"crc\" DevicePath \"\""
Oct 03 13:51:00 crc kubenswrapper[4861]: I1003 13:51:00.396157 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3cff73d0-7e13-497b-9c06-af6e078031c5-combined-ca-bundle\") pod \"neutron-7c8f94f9d8-cqjv4\" (UID: \"3cff73d0-7e13-497b-9c06-af6e078031c5\") " pod="openstack/neutron-7c8f94f9d8-cqjv4"
Oct 03 13:51:00 crc kubenswrapper[4861]: I1003 13:51:00.396961 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/3cff73d0-7e13-497b-9c06-af6e078031c5-ovndb-tls-certs\") pod \"neutron-7c8f94f9d8-cqjv4\" (UID: \"3cff73d0-7e13-497b-9c06-af6e078031c5\") " pod="openstack/neutron-7c8f94f9d8-cqjv4"
Oct 03 13:51:00 crc kubenswrapper[4861]: I1003 13:51:00.407844 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/3cff73d0-7e13-497b-9c06-af6e078031c5-config\") pod \"neutron-7c8f94f9d8-cqjv4\" (UID: \"3cff73d0-7e13-497b-9c06-af6e078031c5\") " pod="openstack/neutron-7c8f94f9d8-cqjv4"
Oct 03 13:51:00 crc kubenswrapper[4861]: I1003 13:51:00.413897 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/3cff73d0-7e13-497b-9c06-af6e078031c5-httpd-config\") pod \"neutron-7c8f94f9d8-cqjv4\" (UID: \"3cff73d0-7e13-497b-9c06-af6e078031c5\") " pod="openstack/neutron-7c8f94f9d8-cqjv4"
Oct 03 13:51:00 crc kubenswrapper[4861]: I1003 13:51:00.414727 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dv2wc\" (UniqueName: \"kubernetes.io/projected/3cff73d0-7e13-497b-9c06-af6e078031c5-kube-api-access-dv2wc\") pod \"neutron-7c8f94f9d8-cqjv4\" (UID: \"3cff73d0-7e13-497b-9c06-af6e078031c5\") " pod="openstack/neutron-7c8f94f9d8-cqjv4"
Oct 03 13:51:00 crc kubenswrapper[4861]: I1003 13:51:00.626351 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-7c8f94f9d8-cqjv4"
Oct 03 13:51:00 crc kubenswrapper[4861]: I1003 13:51:00.829022 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-8b5c85b87-wp2rm" podUID="915a3de5-7d96-4d7d-83fa-96835dfdd0c1" containerName="dnsmasq-dns" containerID="cri-o://7fa833238a0608846caafa0e212160f4747859d6d91ed3ce86b1c6799f1ce4ea" gracePeriod=10
Oct 03 13:51:00 crc kubenswrapper[4861]: I1003 13:51:00.829192 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Oct 03 13:51:00 crc kubenswrapper[4861]: I1003 13:51:00.829489 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"d4fbbbc0-39de-4055-8a23-7ed84b839b41","Type":"ContainerDied","Data":"859ee48bdc8456489cd527521bdd9d62db2af9fda97145f0a06ab8e6c034339b"}
Oct 03 13:51:00 crc kubenswrapper[4861]: I1003 13:51:00.829615 4861 scope.go:117] "RemoveContainer" containerID="1b26b3b3b82e35808b25d763838e1fb3fbc91015428b0d734fff30991e8b9829"
Oct 03 13:51:00 crc kubenswrapper[4861]: I1003 13:51:00.875324 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"]
Oct 03 13:51:00 crc kubenswrapper[4861]: I1003 13:51:00.901885 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"]
Oct 03 13:51:00 crc kubenswrapper[4861]: I1003 13:51:00.942991 4861 scope.go:117] "RemoveContainer" containerID="784b8c8784e5d1b6db6dd86484e01c57f9f54f0569758c3c5e69d09c1f2afa61"
Oct 03 13:51:00 crc kubenswrapper[4861]: I1003 13:51:00.953629 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"]
Oct 03 13:51:01 crc kubenswrapper[4861]: I1003 13:51:00.994926 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"]
Oct 03 13:51:01 crc kubenswrapper[4861]: I1003 13:51:00.995097 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Oct 03 13:51:01 crc kubenswrapper[4861]: I1003 13:51:00.999070 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data"
Oct 03 13:51:01 crc kubenswrapper[4861]: I1003 13:51:00.999448 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc"
Oct 03 13:51:01 crc kubenswrapper[4861]: I1003 13:51:01.088508 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Oct 03 13:51:01 crc kubenswrapper[4861]: I1003 13:51:01.107393 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e179d8c8-2f07-4b5c-9db5-c26877356f86-scripts\") pod \"glance-default-external-api-0\" (UID: \"e179d8c8-2f07-4b5c-9db5-c26877356f86\") " pod="openstack/glance-default-external-api-0"
Oct 03 13:51:01 crc kubenswrapper[4861]: I1003 13:51:01.107433 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e179d8c8-2f07-4b5c-9db5-c26877356f86-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"e179d8c8-2f07-4b5c-9db5-c26877356f86\") " pod="openstack/glance-default-external-api-0"
Oct 03 13:51:01 crc kubenswrapper[4861]: I1003 13:51:01.107664 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-external-api-0\" (UID: \"e179d8c8-2f07-4b5c-9db5-c26877356f86\") " pod="openstack/glance-default-external-api-0"
Oct 03 13:51:01 crc kubenswrapper[4861]: I1003 13:51:01.107734 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e179d8c8-2f07-4b5c-9db5-c26877356f86-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"e179d8c8-2f07-4b5c-9db5-c26877356f86\") " pod="openstack/glance-default-external-api-0"
Oct 03 13:51:01 crc kubenswrapper[4861]: I1003 13:51:01.107760 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e179d8c8-2f07-4b5c-9db5-c26877356f86-logs\") pod \"glance-default-external-api-0\" (UID: \"e179d8c8-2f07-4b5c-9db5-c26877356f86\") " pod="openstack/glance-default-external-api-0"
Oct 03 13:51:01 crc kubenswrapper[4861]: I1003 13:51:01.107786 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e179d8c8-2f07-4b5c-9db5-c26877356f86-config-data\") pod \"glance-default-external-api-0\" (UID: \"e179d8c8-2f07-4b5c-9db5-c26877356f86\") " pod="openstack/glance-default-external-api-0"
Oct 03 13:51:01 crc kubenswrapper[4861]: I1003 13:51:01.107821 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e179d8c8-2f07-4b5c-9db5-c26877356f86-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"e179d8c8-2f07-4b5c-9db5-c26877356f86\") " pod="openstack/glance-default-external-api-0"
Oct 03 13:51:01 crc kubenswrapper[4861]: I1003 13:51:01.107969 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xvb6z\" (UniqueName: \"kubernetes.io/projected/e179d8c8-2f07-4b5c-9db5-c26877356f86-kube-api-access-xvb6z\") pod \"glance-default-external-api-0\" (UID: \"e179d8c8-2f07-4b5c-9db5-c26877356f86\") " pod="openstack/glance-default-external-api-0"
Oct 03 13:51:01 crc kubenswrapper[4861]: I1003 13:51:01.209435 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c2c0f80-1867-4d5e-a3fb-76c84f2cf825-config-data\") pod \"5c2c0f80-1867-4d5e-a3fb-76c84f2cf825\" (UID: \"5c2c0f80-1867-4d5e-a3fb-76c84f2cf825\") "
Oct 03 13:51:01 crc kubenswrapper[4861]: I1003 13:51:01.209540 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7zx7v\" (UniqueName: \"kubernetes.io/projected/5c2c0f80-1867-4d5e-a3fb-76c84f2cf825-kube-api-access-7zx7v\") pod \"5c2c0f80-1867-4d5e-a3fb-76c84f2cf825\" (UID: \"5c2c0f80-1867-4d5e-a3fb-76c84f2cf825\") "
Oct 03 13:51:01 crc kubenswrapper[4861]: I1003 13:51:01.209567 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c2c0f80-1867-4d5e-a3fb-76c84f2cf825-combined-ca-bundle\") pod \"5c2c0f80-1867-4d5e-a3fb-76c84f2cf825\" (UID: \"5c2c0f80-1867-4d5e-a3fb-76c84f2cf825\") "
Oct 03 13:51:01 crc kubenswrapper[4861]: I1003 13:51:01.209609 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5c2c0f80-1867-4d5e-a3fb-76c84f2cf825-logs\") pod \"5c2c0f80-1867-4d5e-a3fb-76c84f2cf825\" (UID: \"5c2c0f80-1867-4d5e-a3fb-76c84f2cf825\") "
Oct 03 13:51:01 crc kubenswrapper[4861]: I1003 13:51:01.209660 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"5c2c0f80-1867-4d5e-a3fb-76c84f2cf825\" (UID: \"5c2c0f80-1867-4d5e-a3fb-76c84f2cf825\") "
Oct 03 13:51:01 crc kubenswrapper[4861]: I1003 13:51:01.209681 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5c2c0f80-1867-4d5e-a3fb-76c84f2cf825-scripts\") pod \"5c2c0f80-1867-4d5e-a3fb-76c84f2cf825\" (UID: \"5c2c0f80-1867-4d5e-a3fb-76c84f2cf825\") "
Oct 03 13:51:01 crc kubenswrapper[4861]: I1003 13:51:01.209729 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5c2c0f80-1867-4d5e-a3fb-76c84f2cf825-httpd-run\") pod \"5c2c0f80-1867-4d5e-a3fb-76c84f2cf825\" (UID: \"5c2c0f80-1867-4d5e-a3fb-76c84f2cf825\") "
Oct 03 13:51:01 crc kubenswrapper[4861]: I1003 13:51:01.209998 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xvb6z\" (UniqueName: \"kubernetes.io/projected/e179d8c8-2f07-4b5c-9db5-c26877356f86-kube-api-access-xvb6z\") pod \"glance-default-external-api-0\" (UID: \"e179d8c8-2f07-4b5c-9db5-c26877356f86\") " pod="openstack/glance-default-external-api-0"
Oct 03 13:51:01 crc kubenswrapper[4861]: I1003 13:51:01.210030 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e179d8c8-2f07-4b5c-9db5-c26877356f86-scripts\") pod \"glance-default-external-api-0\" (UID: \"e179d8c8-2f07-4b5c-9db5-c26877356f86\") " pod="openstack/glance-default-external-api-0"
Oct 03 13:51:01 crc kubenswrapper[4861]: I1003 13:51:01.210050 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e179d8c8-2f07-4b5c-9db5-c26877356f86-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"e179d8c8-2f07-4b5c-9db5-c26877356f86\") " pod="openstack/glance-default-external-api-0"
Oct 03 13:51:01 crc kubenswrapper[4861]: I1003 13:51:01.210093 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-external-api-0\" (UID: \"e179d8c8-2f07-4b5c-9db5-c26877356f86\") " pod="openstack/glance-default-external-api-0"
Oct 03 13:51:01 crc kubenswrapper[4861]: I1003 13:51:01.210140 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e179d8c8-2f07-4b5c-9db5-c26877356f86-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"e179d8c8-2f07-4b5c-9db5-c26877356f86\") " pod="openstack/glance-default-external-api-0"
Oct 03 13:51:01 crc kubenswrapper[4861]: I1003 13:51:01.210157 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e179d8c8-2f07-4b5c-9db5-c26877356f86-logs\") pod \"glance-default-external-api-0\" (UID: \"e179d8c8-2f07-4b5c-9db5-c26877356f86\") " pod="openstack/glance-default-external-api-0"
Oct 03 13:51:01 crc kubenswrapper[4861]: I1003 13:51:01.210173 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e179d8c8-2f07-4b5c-9db5-c26877356f86-config-data\") pod \"glance-default-external-api-0\" (UID: \"e179d8c8-2f07-4b5c-9db5-c26877356f86\") " pod="openstack/glance-default-external-api-0"
Oct 03 13:51:01 crc kubenswrapper[4861]: I1003 13:51:01.210205 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e179d8c8-2f07-4b5c-9db5-c26877356f86-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"e179d8c8-2f07-4b5c-9db5-c26877356f86\") " pod="openstack/glance-default-external-api-0"
Oct 03 13:51:01 crc kubenswrapper[4861]: I1003 13:51:01.210663 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e179d8c8-2f07-4b5c-9db5-c26877356f86-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"e179d8c8-2f07-4b5c-9db5-c26877356f86\") " pod="openstack/glance-default-external-api-0"
Oct 03 13:51:01 crc kubenswrapper[4861]: I1003 13:51:01.211432 4861 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-external-api-0\" (UID: \"e179d8c8-2f07-4b5c-9db5-c26877356f86\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/glance-default-external-api-0"
Oct 03 13:51:01 crc kubenswrapper[4861]: I1003 13:51:01.212216 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e179d8c8-2f07-4b5c-9db5-c26877356f86-logs\") pod \"glance-default-external-api-0\" (UID: \"e179d8c8-2f07-4b5c-9db5-c26877356f86\") " pod="openstack/glance-default-external-api-0"
(UniqueName: \"kubernetes.io/secret/e179d8c8-2f07-4b5c-9db5-c26877356f86-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"e179d8c8-2f07-4b5c-9db5-c26877356f86\") " pod="openstack/glance-default-external-api-0" Oct 03 13:51:01 crc kubenswrapper[4861]: I1003 13:51:01.219121 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5c2c0f80-1867-4d5e-a3fb-76c84f2cf825-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "5c2c0f80-1867-4d5e-a3fb-76c84f2cf825" (UID: "5c2c0f80-1867-4d5e-a3fb-76c84f2cf825"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:51:01 crc kubenswrapper[4861]: I1003 13:51:01.219633 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e179d8c8-2f07-4b5c-9db5-c26877356f86-scripts\") pod \"glance-default-external-api-0\" (UID: \"e179d8c8-2f07-4b5c-9db5-c26877356f86\") " pod="openstack/glance-default-external-api-0" Oct 03 13:51:01 crc kubenswrapper[4861]: I1003 13:51:01.220024 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5c2c0f80-1867-4d5e-a3fb-76c84f2cf825-logs" (OuterVolumeSpecName: "logs") pod "5c2c0f80-1867-4d5e-a3fb-76c84f2cf825" (UID: "5c2c0f80-1867-4d5e-a3fb-76c84f2cf825"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:51:01 crc kubenswrapper[4861]: I1003 13:51:01.220487 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-84b966f6c9-hxkbs"] Oct 03 13:51:01 crc kubenswrapper[4861]: I1003 13:51:01.233249 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage09-crc" (OuterVolumeSpecName: "glance") pod "5c2c0f80-1867-4d5e-a3fb-76c84f2cf825" (UID: "5c2c0f80-1867-4d5e-a3fb-76c84f2cf825"). InnerVolumeSpecName "local-storage09-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 03 13:51:01 crc kubenswrapper[4861]: I1003 13:51:01.233573 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5c2c0f80-1867-4d5e-a3fb-76c84f2cf825-kube-api-access-7zx7v" (OuterVolumeSpecName: "kube-api-access-7zx7v") pod "5c2c0f80-1867-4d5e-a3fb-76c84f2cf825" (UID: "5c2c0f80-1867-4d5e-a3fb-76c84f2cf825"). InnerVolumeSpecName "kube-api-access-7zx7v". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:51:01 crc kubenswrapper[4861]: I1003 13:51:01.234679 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e179d8c8-2f07-4b5c-9db5-c26877356f86-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"e179d8c8-2f07-4b5c-9db5-c26877356f86\") " pod="openstack/glance-default-external-api-0" Oct 03 13:51:01 crc kubenswrapper[4861]: I1003 13:51:01.257069 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5c2c0f80-1867-4d5e-a3fb-76c84f2cf825-scripts" (OuterVolumeSpecName: "scripts") pod "5c2c0f80-1867-4d5e-a3fb-76c84f2cf825" (UID: "5c2c0f80-1867-4d5e-a3fb-76c84f2cf825"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:51:01 crc kubenswrapper[4861]: I1003 13:51:01.272982 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xvb6z\" (UniqueName: \"kubernetes.io/projected/e179d8c8-2f07-4b5c-9db5-c26877356f86-kube-api-access-xvb6z\") pod \"glance-default-external-api-0\" (UID: \"e179d8c8-2f07-4b5c-9db5-c26877356f86\") " pod="openstack/glance-default-external-api-0" Oct 03 13:51:01 crc kubenswrapper[4861]: I1003 13:51:01.286482 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e179d8c8-2f07-4b5c-9db5-c26877356f86-config-data\") pod \"glance-default-external-api-0\" (UID: \"e179d8c8-2f07-4b5c-9db5-c26877356f86\") " pod="openstack/glance-default-external-api-0" Oct 03 13:51:01 crc kubenswrapper[4861]: I1003 13:51:01.313694 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7zx7v\" (UniqueName: \"kubernetes.io/projected/5c2c0f80-1867-4d5e-a3fb-76c84f2cf825-kube-api-access-7zx7v\") on node \"crc\" DevicePath \"\"" Oct 03 13:51:01 crc kubenswrapper[4861]: I1003 13:51:01.313985 4861 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5c2c0f80-1867-4d5e-a3fb-76c84f2cf825-logs\") on node \"crc\" DevicePath \"\"" Oct 03 13:51:01 crc kubenswrapper[4861]: I1003 13:51:01.314060 4861 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" " Oct 03 13:51:01 crc kubenswrapper[4861]: I1003 13:51:01.314116 4861 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5c2c0f80-1867-4d5e-a3fb-76c84f2cf825-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 13:51:01 crc kubenswrapper[4861]: I1003 13:51:01.314169 4861 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5c2c0f80-1867-4d5e-a3fb-76c84f2cf825-httpd-run\") on node \"crc\" DevicePath \"\"" Oct 03 13:51:01 crc kubenswrapper[4861]: I1003 13:51:01.316453 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5c2c0f80-1867-4d5e-a3fb-76c84f2cf825-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5c2c0f80-1867-4d5e-a3fb-76c84f2cf825" (UID: "5c2c0f80-1867-4d5e-a3fb-76c84f2cf825"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:51:01 crc kubenswrapper[4861]: I1003 13:51:01.363256 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-external-api-0\" (UID: \"e179d8c8-2f07-4b5c-9db5-c26877356f86\") " pod="openstack/glance-default-external-api-0" Oct 03 13:51:01 crc kubenswrapper[4861]: I1003 13:51:01.374814 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 03 13:51:01 crc kubenswrapper[4861]: I1003 13:51:01.430431 4861 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c2c0f80-1867-4d5e-a3fb-76c84f2cf825-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 13:51:01 crc kubenswrapper[4861]: I1003 13:51:01.436467 4861 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage09-crc" (UniqueName: "kubernetes.io/local-volume/local-storage09-crc") on node "crc" Oct 03 13:51:01 crc kubenswrapper[4861]: I1003 13:51:01.502304 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5c2c0f80-1867-4d5e-a3fb-76c84f2cf825-config-data" (OuterVolumeSpecName: "config-data") pod "5c2c0f80-1867-4d5e-a3fb-76c84f2cf825" (UID: "5c2c0f80-1867-4d5e-a3fb-76c84f2cf825"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:51:01 crc kubenswrapper[4861]: I1003 13:51:01.531885 4861 reconciler_common.go:293] "Volume detached for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" DevicePath \"\"" Oct 03 13:51:01 crc kubenswrapper[4861]: I1003 13:51:01.531940 4861 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c2c0f80-1867-4d5e-a3fb-76c84f2cf825-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 13:51:01 crc kubenswrapper[4861]: I1003 13:51:01.627757 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-7c8f94f9d8-cqjv4"] Oct 03 13:51:01 crc kubenswrapper[4861]: I1003 13:51:01.887918 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84b966f6c9-hxkbs" event={"ID":"72839de1-20d4-42dd-b913-3a8cbfffa95d","Type":"ContainerStarted","Data":"76f4bbd36736a1e68e230154d455bc55b13c66e3813720a09463ced34a030cb1"} Oct 03 13:51:01 crc kubenswrapper[4861]: I1003 13:51:01.897562 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"5c2c0f80-1867-4d5e-a3fb-76c84f2cf825","Type":"ContainerDied","Data":"1deaef7a35150c79329f9882aa72800859be05ef31e23a040a1332fedd4018e9"} Oct 03 13:51:01 crc kubenswrapper[4861]: I1003 13:51:01.897634 4861 scope.go:117] "RemoveContainer" containerID="4120dcd8203d2733e62e9471f38bfceb97f60904dd990938e8742423779c0e18" Oct 03 13:51:01 crc kubenswrapper[4861]: I1003 13:51:01.897760 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 03 13:51:01 crc kubenswrapper[4861]: I1003 13:51:01.932073 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 03 13:51:01 crc kubenswrapper[4861]: I1003 13:51:01.938275 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 03 13:51:01 crc kubenswrapper[4861]: I1003 13:51:01.943515 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7c8f94f9d8-cqjv4" event={"ID":"3cff73d0-7e13-497b-9c06-af6e078031c5","Type":"ContainerStarted","Data":"53376044f3ca8f1d96214df61f233567d8b93250ca23d1c92ff2148e87c969f1"} Oct 03 13:51:01 crc kubenswrapper[4861]: I1003 13:51:01.966152 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 03 13:51:01 crc kubenswrapper[4861]: E1003 13:51:01.966599 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5c2c0f80-1867-4d5e-a3fb-76c84f2cf825" containerName="glance-httpd" Oct 03 13:51:01 crc kubenswrapper[4861]: I1003 13:51:01.966613 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="5c2c0f80-1867-4d5e-a3fb-76c84f2cf825" containerName="glance-httpd" Oct 03 13:51:01 crc kubenswrapper[4861]: E1003 13:51:01.966629 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5c2c0f80-1867-4d5e-a3fb-76c84f2cf825" containerName="glance-log" Oct 03 13:51:01 crc kubenswrapper[4861]: I1003 13:51:01.966635 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="5c2c0f80-1867-4d5e-a3fb-76c84f2cf825" containerName="glance-log" Oct 03 13:51:01 crc kubenswrapper[4861]: I1003 13:51:01.966815 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="5c2c0f80-1867-4d5e-a3fb-76c84f2cf825" containerName="glance-log" Oct 03 13:51:01 crc kubenswrapper[4861]: I1003 13:51:01.966830 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="5c2c0f80-1867-4d5e-a3fb-76c84f2cf825" containerName="glance-httpd" Oct 03 13:51:01 crc kubenswrapper[4861]: I1003 13:51:01.968147 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 03 13:51:01 crc kubenswrapper[4861]: I1003 13:51:01.974013 4861 generic.go:334] "Generic (PLEG): container finished" podID="915a3de5-7d96-4d7d-83fa-96835dfdd0c1" containerID="7fa833238a0608846caafa0e212160f4747859d6d91ed3ce86b1c6799f1ce4ea" exitCode=0 Oct 03 13:51:01 crc kubenswrapper[4861]: I1003 13:51:01.974048 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8b5c85b87-wp2rm" event={"ID":"915a3de5-7d96-4d7d-83fa-96835dfdd0c1","Type":"ContainerDied","Data":"7fa833238a0608846caafa0e212160f4747859d6d91ed3ce86b1c6799f1ce4ea"} Oct 03 13:51:01 crc kubenswrapper[4861]: I1003 13:51:01.974363 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Oct 03 13:51:01 crc kubenswrapper[4861]: I1003 13:51:01.974560 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Oct 03 13:51:01 crc kubenswrapper[4861]: I1003 13:51:01.992436 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 03 13:51:02 crc kubenswrapper[4861]: I1003 13:51:02.008420 4861 scope.go:117] "RemoveContainer" containerID="df61efdecd6932feee0916640c6ca5c639153ff235ed6c113ea31407ad3d27e9" Oct 03 13:51:02 crc kubenswrapper[4861]: I1003 13:51:02.041405 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b235907f-2bbf-4402-ac15-7c38c44a7461-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"b235907f-2bbf-4402-ac15-7c38c44a7461\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:51:02 crc kubenswrapper[4861]: I1003 13:51:02.041491 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-prfxf\" (UniqueName: \"kubernetes.io/projected/b235907f-2bbf-4402-ac15-7c38c44a7461-kube-api-access-prfxf\") pod \"glance-default-internal-api-0\" (UID: \"b235907f-2bbf-4402-ac15-7c38c44a7461\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:51:02 crc kubenswrapper[4861]: I1003 13:51:02.041527 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b235907f-2bbf-4402-ac15-7c38c44a7461-config-data\") pod \"glance-default-internal-api-0\" (UID: \"b235907f-2bbf-4402-ac15-7c38c44a7461\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:51:02 crc kubenswrapper[4861]: I1003 13:51:02.041559 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b235907f-2bbf-4402-ac15-7c38c44a7461-scripts\") pod \"glance-default-internal-api-0\" (UID: \"b235907f-2bbf-4402-ac15-7c38c44a7461\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:51:02 crc kubenswrapper[4861]: I1003 13:51:02.041592 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b235907f-2bbf-4402-ac15-7c38c44a7461-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"b235907f-2bbf-4402-ac15-7c38c44a7461\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:51:02 crc kubenswrapper[4861]: I1003 13:51:02.041652 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b235907f-2bbf-4402-ac15-7c38c44a7461-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"b235907f-2bbf-4402-ac15-7c38c44a7461\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:51:02 crc kubenswrapper[4861]: I1003 13:51:02.041686 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b235907f-2bbf-4402-ac15-7c38c44a7461-logs\") pod \"glance-default-internal-api-0\" (UID: \"b235907f-2bbf-4402-ac15-7c38c44a7461\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:51:02 crc kubenswrapper[4861]: I1003 13:51:02.041713 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-internal-api-0\" (UID: \"b235907f-2bbf-4402-ac15-7c38c44a7461\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:51:02 crc kubenswrapper[4861]: I1003 13:51:02.145316 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-prfxf\" (UniqueName: \"kubernetes.io/projected/b235907f-2bbf-4402-ac15-7c38c44a7461-kube-api-access-prfxf\") pod \"glance-default-internal-api-0\" (UID: \"b235907f-2bbf-4402-ac15-7c38c44a7461\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:51:02 crc kubenswrapper[4861]: I1003 13:51:02.145630 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b235907f-2bbf-4402-ac15-7c38c44a7461-config-data\") pod \"glance-default-internal-api-0\" (UID: \"b235907f-2bbf-4402-ac15-7c38c44a7461\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:51:02 crc kubenswrapper[4861]: I1003 13:51:02.145668 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b235907f-2bbf-4402-ac15-7c38c44a7461-scripts\") pod \"glance-default-internal-api-0\" (UID: \"b235907f-2bbf-4402-ac15-7c38c44a7461\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:51:02 crc kubenswrapper[4861]: I1003 13:51:02.145698 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b235907f-2bbf-4402-ac15-7c38c44a7461-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"b235907f-2bbf-4402-ac15-7c38c44a7461\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:51:02 crc kubenswrapper[4861]: I1003 13:51:02.145770 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b235907f-2bbf-4402-ac15-7c38c44a7461-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"b235907f-2bbf-4402-ac15-7c38c44a7461\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:51:02 crc kubenswrapper[4861]: I1003 13:51:02.145806 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b235907f-2bbf-4402-ac15-7c38c44a7461-logs\") pod \"glance-default-internal-api-0\" (UID: \"b235907f-2bbf-4402-ac15-7c38c44a7461\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:51:02 crc kubenswrapper[4861]: I1003 13:51:02.145836 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-internal-api-0\" (UID: \"b235907f-2bbf-4402-ac15-7c38c44a7461\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:51:02 crc kubenswrapper[4861]: I1003 13:51:02.145952 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b235907f-2bbf-4402-ac15-7c38c44a7461-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"b235907f-2bbf-4402-ac15-7c38c44a7461\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:51:02 crc kubenswrapper[4861]: I1003 13:51:02.146546 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b235907f-2bbf-4402-ac15-7c38c44a7461-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"b235907f-2bbf-4402-ac15-7c38c44a7461\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:51:02 crc kubenswrapper[4861]: I1003 13:51:02.147017 4861 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-internal-api-0\" (UID: \"b235907f-2bbf-4402-ac15-7c38c44a7461\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/glance-default-internal-api-0" Oct 03 13:51:02 crc kubenswrapper[4861]: I1003 13:51:02.147596 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b235907f-2bbf-4402-ac15-7c38c44a7461-logs\") pod \"glance-default-internal-api-0\" (UID: \"b235907f-2bbf-4402-ac15-7c38c44a7461\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:51:02 crc kubenswrapper[4861]: I1003 13:51:02.154111 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b235907f-2bbf-4402-ac15-7c38c44a7461-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"b235907f-2bbf-4402-ac15-7c38c44a7461\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:51:02 crc kubenswrapper[4861]: I1003 13:51:02.163801 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b235907f-2bbf-4402-ac15-7c38c44a7461-config-data\") pod \"glance-default-internal-api-0\" (UID: \"b235907f-2bbf-4402-ac15-7c38c44a7461\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:51:02 crc kubenswrapper[4861]: I1003 13:51:02.166202 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b235907f-2bbf-4402-ac15-7c38c44a7461-scripts\") pod \"glance-default-internal-api-0\" (UID: \"b235907f-2bbf-4402-ac15-7c38c44a7461\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:51:02 crc kubenswrapper[4861]: I1003 13:51:02.166953 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b235907f-2bbf-4402-ac15-7c38c44a7461-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"b235907f-2bbf-4402-ac15-7c38c44a7461\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:51:02 crc kubenswrapper[4861]: I1003 13:51:02.196757 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-prfxf\" (UniqueName: \"kubernetes.io/projected/b235907f-2bbf-4402-ac15-7c38c44a7461-kube-api-access-prfxf\") pod \"glance-default-internal-api-0\" (UID: 
\"b235907f-2bbf-4402-ac15-7c38c44a7461\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:51:02 crc kubenswrapper[4861]: I1003 13:51:02.232621 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-internal-api-0\" (UID: \"b235907f-2bbf-4402-ac15-7c38c44a7461\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:51:02 crc kubenswrapper[4861]: I1003 13:51:02.238223 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 03 13:51:02 crc kubenswrapper[4861]: I1003 13:51:02.364516 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 03 13:51:02 crc kubenswrapper[4861]: I1003 13:51:02.417611 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8b5c85b87-wp2rm" Oct 03 13:51:02 crc kubenswrapper[4861]: I1003 13:51:02.450450 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/915a3de5-7d96-4d7d-83fa-96835dfdd0c1-ovsdbserver-nb\") pod \"915a3de5-7d96-4d7d-83fa-96835dfdd0c1\" (UID: \"915a3de5-7d96-4d7d-83fa-96835dfdd0c1\") " Oct 03 13:51:02 crc kubenswrapper[4861]: I1003 13:51:02.450786 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/915a3de5-7d96-4d7d-83fa-96835dfdd0c1-ovsdbserver-sb\") pod \"915a3de5-7d96-4d7d-83fa-96835dfdd0c1\" (UID: \"915a3de5-7d96-4d7d-83fa-96835dfdd0c1\") " Oct 03 13:51:02 crc kubenswrapper[4861]: I1003 13:51:02.450833 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/915a3de5-7d96-4d7d-83fa-96835dfdd0c1-dns-swift-storage-0\") pod \"915a3de5-7d96-4d7d-83fa-96835dfdd0c1\" (UID: \"915a3de5-7d96-4d7d-83fa-96835dfdd0c1\") " Oct 03 13:51:02 crc kubenswrapper[4861]: I1003 13:51:02.450874 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5j56b\" (UniqueName: \"kubernetes.io/projected/915a3de5-7d96-4d7d-83fa-96835dfdd0c1-kube-api-access-5j56b\") pod \"915a3de5-7d96-4d7d-83fa-96835dfdd0c1\" (UID: \"915a3de5-7d96-4d7d-83fa-96835dfdd0c1\") " Oct 03 13:51:02 crc kubenswrapper[4861]: I1003 13:51:02.450899 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/915a3de5-7d96-4d7d-83fa-96835dfdd0c1-config\") pod \"915a3de5-7d96-4d7d-83fa-96835dfdd0c1\" (UID: \"915a3de5-7d96-4d7d-83fa-96835dfdd0c1\") " Oct 03 13:51:02 crc kubenswrapper[4861]: I1003 13:51:02.450944 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/915a3de5-7d96-4d7d-83fa-96835dfdd0c1-dns-svc\") pod \"915a3de5-7d96-4d7d-83fa-96835dfdd0c1\" (UID: \"915a3de5-7d96-4d7d-83fa-96835dfdd0c1\") " Oct 03 13:51:02 crc kubenswrapper[4861]: I1003 13:51:02.532181 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/915a3de5-7d96-4d7d-83fa-96835dfdd0c1-kube-api-access-5j56b" (OuterVolumeSpecName: "kube-api-access-5j56b") pod "915a3de5-7d96-4d7d-83fa-96835dfdd0c1" (UID: "915a3de5-7d96-4d7d-83fa-96835dfdd0c1"). InnerVolumeSpecName "kube-api-access-5j56b". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:51:02 crc kubenswrapper[4861]: I1003 13:51:02.583397 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5j56b\" (UniqueName: \"kubernetes.io/projected/915a3de5-7d96-4d7d-83fa-96835dfdd0c1-kube-api-access-5j56b\") on node \"crc\" DevicePath \"\"" Oct 03 13:51:02 crc kubenswrapper[4861]: I1003 13:51:02.702787 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5c2c0f80-1867-4d5e-a3fb-76c84f2cf825" path="/var/lib/kubelet/pods/5c2c0f80-1867-4d5e-a3fb-76c84f2cf825/volumes" Oct 03 13:51:02 crc kubenswrapper[4861]: I1003 13:51:02.703875 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d4fbbbc0-39de-4055-8a23-7ed84b839b41" path="/var/lib/kubelet/pods/d4fbbbc0-39de-4055-8a23-7ed84b839b41/volumes" Oct 03 13:51:02 crc kubenswrapper[4861]: I1003 13:51:02.730729 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/915a3de5-7d96-4d7d-83fa-96835dfdd0c1-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "915a3de5-7d96-4d7d-83fa-96835dfdd0c1" (UID: "915a3de5-7d96-4d7d-83fa-96835dfdd0c1"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:51:02 crc kubenswrapper[4861]: I1003 13:51:02.758957 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/915a3de5-7d96-4d7d-83fa-96835dfdd0c1-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "915a3de5-7d96-4d7d-83fa-96835dfdd0c1" (UID: "915a3de5-7d96-4d7d-83fa-96835dfdd0c1"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:51:02 crc kubenswrapper[4861]: I1003 13:51:02.759485 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/915a3de5-7d96-4d7d-83fa-96835dfdd0c1-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "915a3de5-7d96-4d7d-83fa-96835dfdd0c1" (UID: "915a3de5-7d96-4d7d-83fa-96835dfdd0c1"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:51:02 crc kubenswrapper[4861]: I1003 13:51:02.763257 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/915a3de5-7d96-4d7d-83fa-96835dfdd0c1-config" (OuterVolumeSpecName: "config") pod "915a3de5-7d96-4d7d-83fa-96835dfdd0c1" (UID: "915a3de5-7d96-4d7d-83fa-96835dfdd0c1"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:51:02 crc kubenswrapper[4861]: I1003 13:51:02.764879 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/915a3de5-7d96-4d7d-83fa-96835dfdd0c1-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "915a3de5-7d96-4d7d-83fa-96835dfdd0c1" (UID: "915a3de5-7d96-4d7d-83fa-96835dfdd0c1"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:51:02 crc kubenswrapper[4861]: I1003 13:51:02.794605 4861 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/915a3de5-7d96-4d7d-83fa-96835dfdd0c1-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 03 13:51:02 crc kubenswrapper[4861]: I1003 13:51:02.794653 4861 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/915a3de5-7d96-4d7d-83fa-96835dfdd0c1-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 03 13:51:02 crc kubenswrapper[4861]: I1003 13:51:02.794667 4861 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/915a3de5-7d96-4d7d-83fa-96835dfdd0c1-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 03 13:51:02 crc kubenswrapper[4861]: I1003 13:51:02.794681 4861 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/915a3de5-7d96-4d7d-83fa-96835dfdd0c1-config\") on node \"crc\" DevicePath \"\"" Oct 03 13:51:02 crc kubenswrapper[4861]: I1003 13:51:02.794695 4861 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/915a3de5-7d96-4d7d-83fa-96835dfdd0c1-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 03 13:51:02 crc kubenswrapper[4861]: I1003 13:51:02.991848 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7c8f94f9d8-cqjv4" event={"ID":"3cff73d0-7e13-497b-9c06-af6e078031c5","Type":"ContainerStarted","Data":"c9faed708ce407ef8583884f185ee075b0285f8f584ab17b840c681d39035d0a"} Oct 03 13:51:02 crc kubenswrapper[4861]: I1003 13:51:02.991893 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7c8f94f9d8-cqjv4" event={"ID":"3cff73d0-7e13-497b-9c06-af6e078031c5","Type":"ContainerStarted","Data":"f21b64e6bb91b5b17fea39e443b78ac6d0095c8f6bb85a5736dc5043efa19172"} Oct 03 13:51:02 crc kubenswrapper[4861]: I1003 13:51:02.993108 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-7c8f94f9d8-cqjv4" Oct 03 13:51:02 crc kubenswrapper[4861]: I1003 13:51:02.994722 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8b5c85b87-wp2rm" event={"ID":"915a3de5-7d96-4d7d-83fa-96835dfdd0c1","Type":"ContainerDied","Data":"7d44e785159de4f0d80772c89e4d3c643f1c9ed8b4e7316b14e2e6b994616e49"} Oct 03 13:51:02 crc kubenswrapper[4861]: I1003 13:51:02.994753 4861 scope.go:117] "RemoveContainer" containerID="7fa833238a0608846caafa0e212160f4747859d6d91ed3ce86b1c6799f1ce4ea" Oct 03 13:51:02 crc kubenswrapper[4861]: I1003 13:51:02.994838 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-8b5c85b87-wp2rm" Oct 03 13:51:03 crc kubenswrapper[4861]: I1003 13:51:03.025665 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"e179d8c8-2f07-4b5c-9db5-c26877356f86","Type":"ContainerStarted","Data":"c5d9d53cbf341d79cff5c9fde80795a706d915ed837b35de0445be9a3563a4cd"} Oct 03 13:51:03 crc kubenswrapper[4861]: I1003 13:51:03.075358 4861 generic.go:334] "Generic (PLEG): container finished" podID="72839de1-20d4-42dd-b913-3a8cbfffa95d" containerID="cc1e2c047c4feaffd19251d0c40578b631487a89cb37915e37a13c4c87155fb7" exitCode=0 Oct 03 13:51:03 crc kubenswrapper[4861]: I1003 13:51:03.075404 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84b966f6c9-hxkbs" event={"ID":"72839de1-20d4-42dd-b913-3a8cbfffa95d","Type":"ContainerDied","Data":"cc1e2c047c4feaffd19251d0c40578b631487a89cb37915e37a13c4c87155fb7"} Oct 03 13:51:03 crc kubenswrapper[4861]: I1003 13:51:03.078442 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-7c8f94f9d8-cqjv4" podStartSLOduration=4.078426707 podStartE2EDuration="4.078426707s" podCreationTimestamp="2025-10-03 13:50:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:51:03.047584633 +0000 UTC m=+1177.045569680" watchObservedRunningTime="2025-10-03 13:51:03.078426707 +0000 UTC m=+1177.076411744" Oct 03 13:51:03 crc kubenswrapper[4861]: I1003 13:51:03.087825 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8b5c85b87-wp2rm"] Oct 03 13:51:03 crc kubenswrapper[4861]: I1003 13:51:03.117886 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-8b5c85b87-wp2rm"] Oct 03 13:51:03 crc kubenswrapper[4861]: I1003 13:51:03.163048 4861 scope.go:117] "RemoveContainer" containerID="974c9f9765b74a9bd19e4877ab4a7c52e552a3eb585420967bee66ec1ab25b27" Oct 03 13:51:03 crc kubenswrapper[4861]: I1003 13:51:03.393194 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 03 13:51:04 crc kubenswrapper[4861]: I1003 13:51:04.100969 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"e179d8c8-2f07-4b5c-9db5-c26877356f86","Type":"ContainerStarted","Data":"a55d2cf51b6106dad51dd75123df347fa3f197c41aedc1bac6fa6e18ca480564"} Oct 03 13:51:04 crc kubenswrapper[4861]: I1003 13:51:04.103418 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"b235907f-2bbf-4402-ac15-7c38c44a7461","Type":"ContainerStarted","Data":"d85a364c6a7602d2789ae153b62171ec025beeadc66ed740e50aeb6cd3419de2"} Oct 03 13:51:04 crc kubenswrapper[4861]: I1003 13:51:04.105414 4861 generic.go:334] "Generic (PLEG): container finished" podID="dd37928f-4e28-4ff6-b3bf-5baa2941c432" containerID="ebc96b17736f951d05b7af82a83fb68c57336d48d0621a313b8ce7a6beefa295" exitCode=0 Oct 03 13:51:04 crc kubenswrapper[4861]: I1003 13:51:04.105457 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-cmgtn" event={"ID":"dd37928f-4e28-4ff6-b3bf-5baa2941c432","Type":"ContainerDied","Data":"ebc96b17736f951d05b7af82a83fb68c57336d48d0621a313b8ce7a6beefa295"} Oct 03 13:51:04 crc kubenswrapper[4861]: I1003 13:51:04.112079 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84b966f6c9-hxkbs" 
event={"ID":"72839de1-20d4-42dd-b913-3a8cbfffa95d","Type":"ContainerStarted","Data":"28f655bb301621944344ee708abddcef35e2658feabf7deeec559af2b704545a"} Oct 03 13:51:04 crc kubenswrapper[4861]: I1003 13:51:04.112316 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-84b966f6c9-hxkbs" Oct 03 13:51:04 crc kubenswrapper[4861]: I1003 13:51:04.127706 4861 generic.go:334] "Generic (PLEG): container finished" podID="7b148d26-3aac-44de-9776-c03b03c5fff2" containerID="4c4adbc9f6f31d3c37ae6cbde5ed1a833e9d9b61cb14a154f12f609ed560cbfd" exitCode=0 Oct 03 13:51:04 crc kubenswrapper[4861]: I1003 13:51:04.127774 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-f6j5h" event={"ID":"7b148d26-3aac-44de-9776-c03b03c5fff2","Type":"ContainerDied","Data":"4c4adbc9f6f31d3c37ae6cbde5ed1a833e9d9b61cb14a154f12f609ed560cbfd"} Oct 03 13:51:04 crc kubenswrapper[4861]: I1003 13:51:04.150325 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-84b966f6c9-hxkbs" podStartSLOduration=5.150297056 podStartE2EDuration="5.150297056s" podCreationTimestamp="2025-10-03 13:50:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:51:04.147677766 +0000 UTC m=+1178.145662803" watchObservedRunningTime="2025-10-03 13:51:04.150297056 +0000 UTC m=+1178.148282103" Oct 03 13:51:04 crc kubenswrapper[4861]: I1003 13:51:04.463282 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-55bc9b5c77-pkdzr"] Oct 03 13:51:04 crc kubenswrapper[4861]: E1003 13:51:04.463747 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="915a3de5-7d96-4d7d-83fa-96835dfdd0c1" containerName="init" Oct 03 13:51:04 crc kubenswrapper[4861]: I1003 13:51:04.463765 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="915a3de5-7d96-4d7d-83fa-96835dfdd0c1" containerName="init" Oct 03 13:51:04 crc kubenswrapper[4861]: E1003 13:51:04.463793 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="915a3de5-7d96-4d7d-83fa-96835dfdd0c1" containerName="dnsmasq-dns" Oct 03 13:51:04 crc kubenswrapper[4861]: I1003 13:51:04.463799 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="915a3de5-7d96-4d7d-83fa-96835dfdd0c1" containerName="dnsmasq-dns" Oct 03 13:51:04 crc kubenswrapper[4861]: I1003 13:51:04.464014 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="915a3de5-7d96-4d7d-83fa-96835dfdd0c1" containerName="dnsmasq-dns" Oct 03 13:51:04 crc kubenswrapper[4861]: I1003 13:51:04.465839 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-55bc9b5c77-pkdzr" Oct 03 13:51:04 crc kubenswrapper[4861]: I1003 13:51:04.477173 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-internal-svc" Oct 03 13:51:04 crc kubenswrapper[4861]: I1003 13:51:04.477445 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-public-svc" Oct 03 13:51:04 crc kubenswrapper[4861]: I1003 13:51:04.492609 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-55bc9b5c77-pkdzr"] Oct 03 13:51:04 crc kubenswrapper[4861]: I1003 13:51:04.553515 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b539b449-5e16-4bb4-8931-ba6c9ad1df7d-internal-tls-certs\") pod \"neutron-55bc9b5c77-pkdzr\" (UID: \"b539b449-5e16-4bb4-8931-ba6c9ad1df7d\") " pod="openstack/neutron-55bc9b5c77-pkdzr" Oct 03 13:51:04 crc kubenswrapper[4861]: I1003 13:51:04.553577 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xkd9s\" (UniqueName: \"kubernetes.io/projected/b539b449-5e16-4bb4-8931-ba6c9ad1df7d-kube-api-access-xkd9s\") pod \"neutron-55bc9b5c77-pkdzr\" (UID: \"b539b449-5e16-4bb4-8931-ba6c9ad1df7d\") " pod="openstack/neutron-55bc9b5c77-pkdzr" Oct 03 13:51:04 crc kubenswrapper[4861]: I1003 13:51:04.553601 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/b539b449-5e16-4bb4-8931-ba6c9ad1df7d-ovndb-tls-certs\") pod \"neutron-55bc9b5c77-pkdzr\" (UID: \"b539b449-5e16-4bb4-8931-ba6c9ad1df7d\") " pod="openstack/neutron-55bc9b5c77-pkdzr" Oct 03 13:51:04 crc kubenswrapper[4861]: I1003 13:51:04.553971 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b539b449-5e16-4bb4-8931-ba6c9ad1df7d-combined-ca-bundle\") pod \"neutron-55bc9b5c77-pkdzr\" (UID: \"b539b449-5e16-4bb4-8931-ba6c9ad1df7d\") " pod="openstack/neutron-55bc9b5c77-pkdzr" Oct 03 13:51:04 crc kubenswrapper[4861]: I1003 13:51:04.554006 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/b539b449-5e16-4bb4-8931-ba6c9ad1df7d-config\") pod \"neutron-55bc9b5c77-pkdzr\" (UID: \"b539b449-5e16-4bb4-8931-ba6c9ad1df7d\") " pod="openstack/neutron-55bc9b5c77-pkdzr" Oct 03 13:51:04 crc kubenswrapper[4861]: I1003 13:51:04.554035 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/b539b449-5e16-4bb4-8931-ba6c9ad1df7d-httpd-config\") pod \"neutron-55bc9b5c77-pkdzr\" (UID: \"b539b449-5e16-4bb4-8931-ba6c9ad1df7d\") " pod="openstack/neutron-55bc9b5c77-pkdzr" Oct 03 13:51:04 crc kubenswrapper[4861]: I1003 13:51:04.554088 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b539b449-5e16-4bb4-8931-ba6c9ad1df7d-public-tls-certs\") pod \"neutron-55bc9b5c77-pkdzr\" (UID: \"b539b449-5e16-4bb4-8931-ba6c9ad1df7d\") " pod="openstack/neutron-55bc9b5c77-pkdzr" Oct 03 13:51:04 crc kubenswrapper[4861]: I1003 13:51:04.655741 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/secret/b539b449-5e16-4bb4-8931-ba6c9ad1df7d-config\") pod \"neutron-55bc9b5c77-pkdzr\" (UID: \"b539b449-5e16-4bb4-8931-ba6c9ad1df7d\") " pod="openstack/neutron-55bc9b5c77-pkdzr" Oct 03 13:51:04 crc kubenswrapper[4861]: I1003 13:51:04.656554 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/b539b449-5e16-4bb4-8931-ba6c9ad1df7d-httpd-config\") pod \"neutron-55bc9b5c77-pkdzr\" (UID: \"b539b449-5e16-4bb4-8931-ba6c9ad1df7d\") " pod="openstack/neutron-55bc9b5c77-pkdzr" Oct 03 13:51:04 crc kubenswrapper[4861]: I1003 13:51:04.656788 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b539b449-5e16-4bb4-8931-ba6c9ad1df7d-public-tls-certs\") pod \"neutron-55bc9b5c77-pkdzr\" (UID: \"b539b449-5e16-4bb4-8931-ba6c9ad1df7d\") " pod="openstack/neutron-55bc9b5c77-pkdzr" Oct 03 13:51:04 crc kubenswrapper[4861]: I1003 13:51:04.656872 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b539b449-5e16-4bb4-8931-ba6c9ad1df7d-internal-tls-certs\") pod \"neutron-55bc9b5c77-pkdzr\" (UID: \"b539b449-5e16-4bb4-8931-ba6c9ad1df7d\") " pod="openstack/neutron-55bc9b5c77-pkdzr" Oct 03 13:51:04 crc kubenswrapper[4861]: I1003 13:51:04.656896 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xkd9s\" (UniqueName: \"kubernetes.io/projected/b539b449-5e16-4bb4-8931-ba6c9ad1df7d-kube-api-access-xkd9s\") pod \"neutron-55bc9b5c77-pkdzr\" (UID: \"b539b449-5e16-4bb4-8931-ba6c9ad1df7d\") " pod="openstack/neutron-55bc9b5c77-pkdzr" Oct 03 13:51:04 crc kubenswrapper[4861]: I1003 13:51:04.656911 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/b539b449-5e16-4bb4-8931-ba6c9ad1df7d-ovndb-tls-certs\") pod \"neutron-55bc9b5c77-pkdzr\" (UID: \"b539b449-5e16-4bb4-8931-ba6c9ad1df7d\") " pod="openstack/neutron-55bc9b5c77-pkdzr" Oct 03 13:51:04 crc kubenswrapper[4861]: I1003 13:51:04.656958 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b539b449-5e16-4bb4-8931-ba6c9ad1df7d-combined-ca-bundle\") pod \"neutron-55bc9b5c77-pkdzr\" (UID: \"b539b449-5e16-4bb4-8931-ba6c9ad1df7d\") " pod="openstack/neutron-55bc9b5c77-pkdzr" Oct 03 13:51:04 crc kubenswrapper[4861]: I1003 13:51:04.661860 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b539b449-5e16-4bb4-8931-ba6c9ad1df7d-internal-tls-certs\") pod \"neutron-55bc9b5c77-pkdzr\" (UID: \"b539b449-5e16-4bb4-8931-ba6c9ad1df7d\") " pod="openstack/neutron-55bc9b5c77-pkdzr" Oct 03 13:51:04 crc kubenswrapper[4861]: I1003 13:51:04.663002 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b539b449-5e16-4bb4-8931-ba6c9ad1df7d-combined-ca-bundle\") pod \"neutron-55bc9b5c77-pkdzr\" (UID: \"b539b449-5e16-4bb4-8931-ba6c9ad1df7d\") " pod="openstack/neutron-55bc9b5c77-pkdzr" Oct 03 13:51:04 crc kubenswrapper[4861]: I1003 13:51:04.663781 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/b539b449-5e16-4bb4-8931-ba6c9ad1df7d-httpd-config\") pod \"neutron-55bc9b5c77-pkdzr\" (UID: 
\"b539b449-5e16-4bb4-8931-ba6c9ad1df7d\") " pod="openstack/neutron-55bc9b5c77-pkdzr" Oct 03 13:51:04 crc kubenswrapper[4861]: I1003 13:51:04.666886 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/b539b449-5e16-4bb4-8931-ba6c9ad1df7d-config\") pod \"neutron-55bc9b5c77-pkdzr\" (UID: \"b539b449-5e16-4bb4-8931-ba6c9ad1df7d\") " pod="openstack/neutron-55bc9b5c77-pkdzr" Oct 03 13:51:04 crc kubenswrapper[4861]: I1003 13:51:04.667351 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b539b449-5e16-4bb4-8931-ba6c9ad1df7d-public-tls-certs\") pod \"neutron-55bc9b5c77-pkdzr\" (UID: \"b539b449-5e16-4bb4-8931-ba6c9ad1df7d\") " pod="openstack/neutron-55bc9b5c77-pkdzr" Oct 03 13:51:04 crc kubenswrapper[4861]: I1003 13:51:04.679943 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/b539b449-5e16-4bb4-8931-ba6c9ad1df7d-ovndb-tls-certs\") pod \"neutron-55bc9b5c77-pkdzr\" (UID: \"b539b449-5e16-4bb4-8931-ba6c9ad1df7d\") " pod="openstack/neutron-55bc9b5c77-pkdzr" Oct 03 13:51:04 crc kubenswrapper[4861]: I1003 13:51:04.694626 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xkd9s\" (UniqueName: \"kubernetes.io/projected/b539b449-5e16-4bb4-8931-ba6c9ad1df7d-kube-api-access-xkd9s\") pod \"neutron-55bc9b5c77-pkdzr\" (UID: \"b539b449-5e16-4bb4-8931-ba6c9ad1df7d\") " pod="openstack/neutron-55bc9b5c77-pkdzr" Oct 03 13:51:04 crc kubenswrapper[4861]: I1003 13:51:04.707154 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="915a3de5-7d96-4d7d-83fa-96835dfdd0c1" path="/var/lib/kubelet/pods/915a3de5-7d96-4d7d-83fa-96835dfdd0c1/volumes" Oct 03 13:51:04 crc kubenswrapper[4861]: I1003 13:51:04.797153 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-55bc9b5c77-pkdzr" Oct 03 13:51:05 crc kubenswrapper[4861]: I1003 13:51:05.158828 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"b235907f-2bbf-4402-ac15-7c38c44a7461","Type":"ContainerStarted","Data":"31943b45a956b02a6ab0773b2a3fe2171422f6989c6c5f028fa28e27430e1e0f"} Oct 03 13:51:08 crc kubenswrapper[4861]: I1003 13:51:08.594268 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-84cdb7b9dd-jhc2h" Oct 03 13:51:08 crc kubenswrapper[4861]: I1003 13:51:08.594535 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-84cdb7b9dd-jhc2h" Oct 03 13:51:08 crc kubenswrapper[4861]: I1003 13:51:08.596799 4861 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-84cdb7b9dd-jhc2h" podUID="c589e11a-4953-46ec-aeff-a83f6557421f" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.143:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.143:8443: connect: connection refused" Oct 03 13:51:08 crc kubenswrapper[4861]: I1003 13:51:08.722110 4861 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-6c8cb9d9fb-bt6ls" podUID="81ec621b-cc30-4ab2-ae0e-bdd71629009f" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.144:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.144:8443: connect: connection refused" Oct 03 13:51:10 crc kubenswrapper[4861]: I1003 13:51:10.336463 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-84b966f6c9-hxkbs" Oct 03 13:51:11 crc kubenswrapper[4861]: I1003 13:51:10.395148 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-76fcf4b695-9ckx2"] Oct 03 13:51:11 crc kubenswrapper[4861]: I1003 13:51:10.395391 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-76fcf4b695-9ckx2" podUID="c53e054d-0317-469b-b94a-1d73bdfc5171" containerName="dnsmasq-dns" containerID="cri-o://5ba27c3db11bc950c81edbd68ecb2b8c39b5e401fba11e8b94a9974eac5ead94" gracePeriod=10 Oct 03 13:51:12 crc kubenswrapper[4861]: I1003 13:51:12.225969 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-76fcf4b695-9ckx2" event={"ID":"c53e054d-0317-469b-b94a-1d73bdfc5171","Type":"ContainerDied","Data":"5ba27c3db11bc950c81edbd68ecb2b8c39b5e401fba11e8b94a9974eac5ead94"} Oct 03 13:51:12 crc kubenswrapper[4861]: I1003 13:51:12.225893 4861 generic.go:334] "Generic (PLEG): container finished" podID="c53e054d-0317-469b-b94a-1d73bdfc5171" containerID="5ba27c3db11bc950c81edbd68ecb2b8c39b5e401fba11e8b94a9974eac5ead94" exitCode=0 Oct 03 13:51:14 crc kubenswrapper[4861]: I1003 13:51:14.398920 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-cmgtn" Oct 03 13:51:14 crc kubenswrapper[4861]: I1003 13:51:14.409418 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-f6j5h" Oct 03 13:51:14 crc kubenswrapper[4861]: I1003 13:51:14.544526 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dd37928f-4e28-4ff6-b3bf-5baa2941c432-config-data\") pod \"dd37928f-4e28-4ff6-b3bf-5baa2941c432\" (UID: \"dd37928f-4e28-4ff6-b3bf-5baa2941c432\") " Oct 03 13:51:14 crc kubenswrapper[4861]: I1003 13:51:14.544845 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7b148d26-3aac-44de-9776-c03b03c5fff2-scripts\") pod \"7b148d26-3aac-44de-9776-c03b03c5fff2\" (UID: \"7b148d26-3aac-44de-9776-c03b03c5fff2\") " Oct 03 13:51:14 crc kubenswrapper[4861]: I1003 13:51:14.544891 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7b148d26-3aac-44de-9776-c03b03c5fff2-config-data\") pod \"7b148d26-3aac-44de-9776-c03b03c5fff2\" (UID: \"7b148d26-3aac-44de-9776-c03b03c5fff2\") " Oct 03 13:51:14 crc kubenswrapper[4861]: I1003 13:51:14.544942 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/dd37928f-4e28-4ff6-b3bf-5baa2941c432-credential-keys\") pod \"dd37928f-4e28-4ff6-b3bf-5baa2941c432\" (UID: \"dd37928f-4e28-4ff6-b3bf-5baa2941c432\") " Oct 03 13:51:14 crc kubenswrapper[4861]: I1003 13:51:14.545013 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/dd37928f-4e28-4ff6-b3bf-5baa2941c432-fernet-keys\") pod \"dd37928f-4e28-4ff6-b3bf-5baa2941c432\" (UID: \"dd37928f-4e28-4ff6-b3bf-5baa2941c432\") " Oct 03 13:51:14 crc kubenswrapper[4861]: I1003 13:51:14.545049 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kf99d\" (UniqueName: \"kubernetes.io/projected/dd37928f-4e28-4ff6-b3bf-5baa2941c432-kube-api-access-kf99d\") pod \"dd37928f-4e28-4ff6-b3bf-5baa2941c432\" (UID: \"dd37928f-4e28-4ff6-b3bf-5baa2941c432\") " Oct 03 13:51:14 crc kubenswrapper[4861]: I1003 13:51:14.545070 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7b148d26-3aac-44de-9776-c03b03c5fff2-logs\") pod \"7b148d26-3aac-44de-9776-c03b03c5fff2\" (UID: \"7b148d26-3aac-44de-9776-c03b03c5fff2\") " Oct 03 13:51:14 crc kubenswrapper[4861]: I1003 13:51:14.545094 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x5l5f\" (UniqueName: \"kubernetes.io/projected/7b148d26-3aac-44de-9776-c03b03c5fff2-kube-api-access-x5l5f\") pod \"7b148d26-3aac-44de-9776-c03b03c5fff2\" (UID: \"7b148d26-3aac-44de-9776-c03b03c5fff2\") " Oct 03 13:51:14 crc kubenswrapper[4861]: I1003 13:51:14.545108 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dd37928f-4e28-4ff6-b3bf-5baa2941c432-scripts\") pod \"dd37928f-4e28-4ff6-b3bf-5baa2941c432\" (UID: \"dd37928f-4e28-4ff6-b3bf-5baa2941c432\") " Oct 03 13:51:14 crc kubenswrapper[4861]: I1003 13:51:14.545133 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd37928f-4e28-4ff6-b3bf-5baa2941c432-combined-ca-bundle\") pod \"dd37928f-4e28-4ff6-b3bf-5baa2941c432\" (UID: \"dd37928f-4e28-4ff6-b3bf-5baa2941c432\") 
" Oct 03 13:51:14 crc kubenswrapper[4861]: I1003 13:51:14.545167 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b148d26-3aac-44de-9776-c03b03c5fff2-combined-ca-bundle\") pod \"7b148d26-3aac-44de-9776-c03b03c5fff2\" (UID: \"7b148d26-3aac-44de-9776-c03b03c5fff2\") " Oct 03 13:51:14 crc kubenswrapper[4861]: I1003 13:51:14.547407 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7b148d26-3aac-44de-9776-c03b03c5fff2-logs" (OuterVolumeSpecName: "logs") pod "7b148d26-3aac-44de-9776-c03b03c5fff2" (UID: "7b148d26-3aac-44de-9776-c03b03c5fff2"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:51:14 crc kubenswrapper[4861]: I1003 13:51:14.557030 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dd37928f-4e28-4ff6-b3bf-5baa2941c432-kube-api-access-kf99d" (OuterVolumeSpecName: "kube-api-access-kf99d") pod "dd37928f-4e28-4ff6-b3bf-5baa2941c432" (UID: "dd37928f-4e28-4ff6-b3bf-5baa2941c432"). InnerVolumeSpecName "kube-api-access-kf99d". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:51:14 crc kubenswrapper[4861]: I1003 13:51:14.602845 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd37928f-4e28-4ff6-b3bf-5baa2941c432-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "dd37928f-4e28-4ff6-b3bf-5baa2941c432" (UID: "dd37928f-4e28-4ff6-b3bf-5baa2941c432"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:51:14 crc kubenswrapper[4861]: I1003 13:51:14.609783 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd37928f-4e28-4ff6-b3bf-5baa2941c432-scripts" (OuterVolumeSpecName: "scripts") pod "dd37928f-4e28-4ff6-b3bf-5baa2941c432" (UID: "dd37928f-4e28-4ff6-b3bf-5baa2941c432"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:51:14 crc kubenswrapper[4861]: I1003 13:51:14.614864 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7b148d26-3aac-44de-9776-c03b03c5fff2-kube-api-access-x5l5f" (OuterVolumeSpecName: "kube-api-access-x5l5f") pod "7b148d26-3aac-44de-9776-c03b03c5fff2" (UID: "7b148d26-3aac-44de-9776-c03b03c5fff2"). InnerVolumeSpecName "kube-api-access-x5l5f". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:51:14 crc kubenswrapper[4861]: I1003 13:51:14.618741 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7b148d26-3aac-44de-9776-c03b03c5fff2-scripts" (OuterVolumeSpecName: "scripts") pod "7b148d26-3aac-44de-9776-c03b03c5fff2" (UID: "7b148d26-3aac-44de-9776-c03b03c5fff2"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:51:14 crc kubenswrapper[4861]: I1003 13:51:14.625332 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd37928f-4e28-4ff6-b3bf-5baa2941c432-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "dd37928f-4e28-4ff6-b3bf-5baa2941c432" (UID: "dd37928f-4e28-4ff6-b3bf-5baa2941c432"). InnerVolumeSpecName "credential-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:51:14 crc kubenswrapper[4861]: I1003 13:51:14.642635 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd37928f-4e28-4ff6-b3bf-5baa2941c432-config-data" (OuterVolumeSpecName: "config-data") pod "dd37928f-4e28-4ff6-b3bf-5baa2941c432" (UID: "dd37928f-4e28-4ff6-b3bf-5baa2941c432"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:51:14 crc kubenswrapper[4861]: I1003 13:51:14.647160 4861 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dd37928f-4e28-4ff6-b3bf-5baa2941c432-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 13:51:14 crc kubenswrapper[4861]: I1003 13:51:14.647188 4861 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7b148d26-3aac-44de-9776-c03b03c5fff2-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 13:51:14 crc kubenswrapper[4861]: I1003 13:51:14.647197 4861 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/dd37928f-4e28-4ff6-b3bf-5baa2941c432-credential-keys\") on node \"crc\" DevicePath \"\"" Oct 03 13:51:14 crc kubenswrapper[4861]: I1003 13:51:14.647206 4861 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/dd37928f-4e28-4ff6-b3bf-5baa2941c432-fernet-keys\") on node \"crc\" DevicePath \"\"" Oct 03 13:51:14 crc kubenswrapper[4861]: I1003 13:51:14.647217 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kf99d\" (UniqueName: \"kubernetes.io/projected/dd37928f-4e28-4ff6-b3bf-5baa2941c432-kube-api-access-kf99d\") on node \"crc\" DevicePath \"\"" Oct 03 13:51:14 crc kubenswrapper[4861]: I1003 13:51:14.647225 4861 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7b148d26-3aac-44de-9776-c03b03c5fff2-logs\") on node \"crc\" DevicePath \"\"" Oct 03 13:51:14 crc kubenswrapper[4861]: I1003 13:51:14.647275 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x5l5f\" (UniqueName: \"kubernetes.io/projected/7b148d26-3aac-44de-9776-c03b03c5fff2-kube-api-access-x5l5f\") on node \"crc\" DevicePath \"\"" Oct 03 13:51:14 crc kubenswrapper[4861]: I1003 13:51:14.647284 4861 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dd37928f-4e28-4ff6-b3bf-5baa2941c432-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 13:51:14 crc kubenswrapper[4861]: I1003 13:51:14.650833 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd37928f-4e28-4ff6-b3bf-5baa2941c432-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "dd37928f-4e28-4ff6-b3bf-5baa2941c432" (UID: "dd37928f-4e28-4ff6-b3bf-5baa2941c432"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:51:14 crc kubenswrapper[4861]: I1003 13:51:14.654339 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7b148d26-3aac-44de-9776-c03b03c5fff2-config-data" (OuterVolumeSpecName: "config-data") pod "7b148d26-3aac-44de-9776-c03b03c5fff2" (UID: "7b148d26-3aac-44de-9776-c03b03c5fff2"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:51:14 crc kubenswrapper[4861]: I1003 13:51:14.686859 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7b148d26-3aac-44de-9776-c03b03c5fff2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7b148d26-3aac-44de-9776-c03b03c5fff2" (UID: "7b148d26-3aac-44de-9776-c03b03c5fff2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:51:14 crc kubenswrapper[4861]: I1003 13:51:14.748489 4861 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7b148d26-3aac-44de-9776-c03b03c5fff2-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 13:51:14 crc kubenswrapper[4861]: I1003 13:51:14.748517 4861 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd37928f-4e28-4ff6-b3bf-5baa2941c432-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 13:51:14 crc kubenswrapper[4861]: I1003 13:51:14.748768 4861 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b148d26-3aac-44de-9776-c03b03c5fff2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 13:51:14 crc kubenswrapper[4861]: I1003 13:51:14.799188 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-76fcf4b695-9ckx2" Oct 03 13:51:14 crc kubenswrapper[4861]: I1003 13:51:14.970708 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c53e054d-0317-469b-b94a-1d73bdfc5171-ovsdbserver-sb\") pod \"c53e054d-0317-469b-b94a-1d73bdfc5171\" (UID: \"c53e054d-0317-469b-b94a-1d73bdfc5171\") " Oct 03 13:51:14 crc kubenswrapper[4861]: I1003 13:51:14.970771 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c53e054d-0317-469b-b94a-1d73bdfc5171-ovsdbserver-nb\") pod \"c53e054d-0317-469b-b94a-1d73bdfc5171\" (UID: \"c53e054d-0317-469b-b94a-1d73bdfc5171\") " Oct 03 13:51:14 crc kubenswrapper[4861]: I1003 13:51:14.970831 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c53e054d-0317-469b-b94a-1d73bdfc5171-dns-svc\") pod \"c53e054d-0317-469b-b94a-1d73bdfc5171\" (UID: \"c53e054d-0317-469b-b94a-1d73bdfc5171\") " Oct 03 13:51:14 crc kubenswrapper[4861]: I1003 13:51:14.970855 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c53e054d-0317-469b-b94a-1d73bdfc5171-dns-swift-storage-0\") pod \"c53e054d-0317-469b-b94a-1d73bdfc5171\" (UID: \"c53e054d-0317-469b-b94a-1d73bdfc5171\") " Oct 03 13:51:14 crc kubenswrapper[4861]: I1003 13:51:14.970890 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hxmmv\" (UniqueName: \"kubernetes.io/projected/c53e054d-0317-469b-b94a-1d73bdfc5171-kube-api-access-hxmmv\") pod \"c53e054d-0317-469b-b94a-1d73bdfc5171\" (UID: \"c53e054d-0317-469b-b94a-1d73bdfc5171\") " Oct 03 13:51:14 crc kubenswrapper[4861]: I1003 13:51:14.970956 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c53e054d-0317-469b-b94a-1d73bdfc5171-config\") pod 
\"c53e054d-0317-469b-b94a-1d73bdfc5171\" (UID: \"c53e054d-0317-469b-b94a-1d73bdfc5171\") " Oct 03 13:51:14 crc kubenswrapper[4861]: I1003 13:51:14.975563 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c53e054d-0317-469b-b94a-1d73bdfc5171-kube-api-access-hxmmv" (OuterVolumeSpecName: "kube-api-access-hxmmv") pod "c53e054d-0317-469b-b94a-1d73bdfc5171" (UID: "c53e054d-0317-469b-b94a-1d73bdfc5171"). InnerVolumeSpecName "kube-api-access-hxmmv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:51:15 crc kubenswrapper[4861]: I1003 13:51:15.073290 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hxmmv\" (UniqueName: \"kubernetes.io/projected/c53e054d-0317-469b-b94a-1d73bdfc5171-kube-api-access-hxmmv\") on node \"crc\" DevicePath \"\"" Oct 03 13:51:15 crc kubenswrapper[4861]: I1003 13:51:15.226419 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c53e054d-0317-469b-b94a-1d73bdfc5171-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "c53e054d-0317-469b-b94a-1d73bdfc5171" (UID: "c53e054d-0317-469b-b94a-1d73bdfc5171"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:51:15 crc kubenswrapper[4861]: I1003 13:51:15.246047 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c53e054d-0317-469b-b94a-1d73bdfc5171-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "c53e054d-0317-469b-b94a-1d73bdfc5171" (UID: "c53e054d-0317-469b-b94a-1d73bdfc5171"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:51:15 crc kubenswrapper[4861]: I1003 13:51:15.254013 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c53e054d-0317-469b-b94a-1d73bdfc5171-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "c53e054d-0317-469b-b94a-1d73bdfc5171" (UID: "c53e054d-0317-469b-b94a-1d73bdfc5171"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:51:15 crc kubenswrapper[4861]: I1003 13:51:15.282005 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-h4wcn" event={"ID":"3b0f621f-bdf0-4768-a764-0bc15e01faba","Type":"ContainerStarted","Data":"cbeb3106bef3beac18cf97956e4fc960ae1f5b838716bc6b7faee440906ea050"} Oct 03 13:51:15 crc kubenswrapper[4861]: I1003 13:51:15.286799 4861 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c53e054d-0317-469b-b94a-1d73bdfc5171-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 03 13:51:15 crc kubenswrapper[4861]: I1003 13:51:15.286833 4861 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c53e054d-0317-469b-b94a-1d73bdfc5171-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 03 13:51:15 crc kubenswrapper[4861]: I1003 13:51:15.286844 4861 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c53e054d-0317-469b-b94a-1d73bdfc5171-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 03 13:51:15 crc kubenswrapper[4861]: I1003 13:51:15.288531 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-f6j5h" event={"ID":"7b148d26-3aac-44de-9776-c03b03c5fff2","Type":"ContainerDied","Data":"c23f0bf64579fdf0da4abf0b38f3e665570769188eeba510391e64c898ce4aff"} Oct 03 13:51:15 crc kubenswrapper[4861]: I1003 13:51:15.288578 4861 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c23f0bf64579fdf0da4abf0b38f3e665570769188eeba510391e64c898ce4aff" Oct 03 13:51:15 crc kubenswrapper[4861]: I1003 13:51:15.288662 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-f6j5h" Oct 03 13:51:15 crc kubenswrapper[4861]: I1003 13:51:15.297724 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-76fcf4b695-9ckx2" event={"ID":"c53e054d-0317-469b-b94a-1d73bdfc5171","Type":"ContainerDied","Data":"fc4ca55692b5e34690334ff4ab47a039dd5f7c6e8675268e7ae098f9af32092d"} Oct 03 13:51:15 crc kubenswrapper[4861]: I1003 13:51:15.297798 4861 scope.go:117] "RemoveContainer" containerID="5ba27c3db11bc950c81edbd68ecb2b8c39b5e401fba11e8b94a9974eac5ead94" Oct 03 13:51:15 crc kubenswrapper[4861]: I1003 13:51:15.297988 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-76fcf4b695-9ckx2" Oct 03 13:51:15 crc kubenswrapper[4861]: I1003 13:51:15.320095 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c53e054d-0317-469b-b94a-1d73bdfc5171-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "c53e054d-0317-469b-b94a-1d73bdfc5171" (UID: "c53e054d-0317-469b-b94a-1d73bdfc5171"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:51:15 crc kubenswrapper[4861]: I1003 13:51:15.322870 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-h4wcn" podStartSLOduration=2.276051348 podStartE2EDuration="55.322853215s" podCreationTimestamp="2025-10-03 13:50:20 +0000 UTC" firstStartedPulling="2025-10-03 13:50:21.736971504 +0000 UTC m=+1135.734956551" lastFinishedPulling="2025-10-03 13:51:14.783773371 +0000 UTC m=+1188.781758418" observedRunningTime="2025-10-03 13:51:15.307272278 +0000 UTC m=+1189.305257345" watchObservedRunningTime="2025-10-03 13:51:15.322853215 +0000 UTC m=+1189.320838272" Oct 03 13:51:15 crc kubenswrapper[4861]: I1003 13:51:15.326506 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-cmgtn" Oct 03 13:51:15 crc kubenswrapper[4861]: I1003 13:51:15.326799 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-cmgtn" event={"ID":"dd37928f-4e28-4ff6-b3bf-5baa2941c432","Type":"ContainerDied","Data":"5323c325eb669e5021b2aa201247b05e1c3c1176a44ce2936facabe703119ac2"} Oct 03 13:51:15 crc kubenswrapper[4861]: I1003 13:51:15.326843 4861 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5323c325eb669e5021b2aa201247b05e1c3c1176a44ce2936facabe703119ac2" Oct 03 13:51:15 crc kubenswrapper[4861]: I1003 13:51:15.332327 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-55bc9b5c77-pkdzr"] Oct 03 13:51:15 crc kubenswrapper[4861]: I1003 13:51:15.365474 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c53e054d-0317-469b-b94a-1d73bdfc5171-config" (OuterVolumeSpecName: "config") pod "c53e054d-0317-469b-b94a-1d73bdfc5171" (UID: "c53e054d-0317-469b-b94a-1d73bdfc5171"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:51:15 crc kubenswrapper[4861]: I1003 13:51:15.389644 4861 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c53e054d-0317-469b-b94a-1d73bdfc5171-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 03 13:51:15 crc kubenswrapper[4861]: I1003 13:51:15.390151 4861 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c53e054d-0317-469b-b94a-1d73bdfc5171-config\") on node \"crc\" DevicePath \"\"" Oct 03 13:51:15 crc kubenswrapper[4861]: W1003 13:51:15.397606 4861 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb539b449_5e16_4bb4_8931_ba6c9ad1df7d.slice/crio-7f7c14bcabb5e57f54d389da178f4d9db180ac4466e0685826905832c12f9e7e WatchSource:0}: Error finding container 7f7c14bcabb5e57f54d389da178f4d9db180ac4466e0685826905832c12f9e7e: Status 404 returned error can't find the container with id 7f7c14bcabb5e57f54d389da178f4d9db180ac4466e0685826905832c12f9e7e Oct 03 13:51:15 crc kubenswrapper[4861]: I1003 13:51:15.533668 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-d84968f5f-dw9pq"] Oct 03 13:51:15 crc kubenswrapper[4861]: E1003 13:51:15.546924 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b148d26-3aac-44de-9776-c03b03c5fff2" containerName="placement-db-sync" Oct 03 13:51:15 crc kubenswrapper[4861]: I1003 13:51:15.546951 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b148d26-3aac-44de-9776-c03b03c5fff2" containerName="placement-db-sync" Oct 03 13:51:15 crc kubenswrapper[4861]: E1003 13:51:15.546963 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd37928f-4e28-4ff6-b3bf-5baa2941c432" containerName="keystone-bootstrap" Oct 03 13:51:15 crc kubenswrapper[4861]: I1003 13:51:15.546970 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd37928f-4e28-4ff6-b3bf-5baa2941c432" containerName="keystone-bootstrap" Oct 03 13:51:15 crc kubenswrapper[4861]: E1003 13:51:15.546981 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c53e054d-0317-469b-b94a-1d73bdfc5171" containerName="dnsmasq-dns" Oct 03 13:51:15 crc kubenswrapper[4861]: I1003 13:51:15.546986 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="c53e054d-0317-469b-b94a-1d73bdfc5171" containerName="dnsmasq-dns" Oct 03 13:51:15 crc kubenswrapper[4861]: E1003 13:51:15.547005 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c53e054d-0317-469b-b94a-1d73bdfc5171" containerName="init" Oct 03 13:51:15 crc kubenswrapper[4861]: I1003 13:51:15.547011 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="c53e054d-0317-469b-b94a-1d73bdfc5171" containerName="init" Oct 03 13:51:15 crc kubenswrapper[4861]: I1003 13:51:15.547177 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="c53e054d-0317-469b-b94a-1d73bdfc5171" containerName="dnsmasq-dns" Oct 03 13:51:15 crc kubenswrapper[4861]: I1003 13:51:15.547194 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="dd37928f-4e28-4ff6-b3bf-5baa2941c432" containerName="keystone-bootstrap" Oct 03 13:51:15 crc kubenswrapper[4861]: I1003 13:51:15.547207 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="7b148d26-3aac-44de-9776-c03b03c5fff2" containerName="placement-db-sync" Oct 03 13:51:15 crc kubenswrapper[4861]: I1003 13:51:15.547858 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-d84968f5f-dw9pq" Oct 03 13:51:15 crc kubenswrapper[4861]: I1003 13:51:15.559652 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-d84968f5f-dw9pq"] Oct 03 13:51:15 crc kubenswrapper[4861]: I1003 13:51:15.567210 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Oct 03 13:51:15 crc kubenswrapper[4861]: I1003 13:51:15.567433 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-x97lk" Oct 03 13:51:15 crc kubenswrapper[4861]: I1003 13:51:15.567550 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Oct 03 13:51:15 crc kubenswrapper[4861]: I1003 13:51:15.567697 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Oct 03 13:51:15 crc kubenswrapper[4861]: I1003 13:51:15.567795 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-internal-svc" Oct 03 13:51:15 crc kubenswrapper[4861]: I1003 13:51:15.567890 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-public-svc" Oct 03 13:51:15 crc kubenswrapper[4861]: I1003 13:51:15.593323 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbeb45d8-15f9-47b5-b6af-f578362eda62-combined-ca-bundle\") pod \"keystone-d84968f5f-dw9pq\" (UID: \"fbeb45d8-15f9-47b5-b6af-f578362eda62\") " pod="openstack/keystone-d84968f5f-dw9pq" Oct 03 13:51:15 crc kubenswrapper[4861]: I1003 13:51:15.593359 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/fbeb45d8-15f9-47b5-b6af-f578362eda62-internal-tls-certs\") pod \"keystone-d84968f5f-dw9pq\" (UID: \"fbeb45d8-15f9-47b5-b6af-f578362eda62\") " pod="openstack/keystone-d84968f5f-dw9pq" Oct 03 13:51:15 crc kubenswrapper[4861]: I1003 13:51:15.593384 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/fbeb45d8-15f9-47b5-b6af-f578362eda62-fernet-keys\") pod \"keystone-d84968f5f-dw9pq\" (UID: \"fbeb45d8-15f9-47b5-b6af-f578362eda62\") " pod="openstack/keystone-d84968f5f-dw9pq" Oct 03 13:51:15 crc kubenswrapper[4861]: I1003 13:51:15.593425 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fbeb45d8-15f9-47b5-b6af-f578362eda62-config-data\") pod \"keystone-d84968f5f-dw9pq\" (UID: \"fbeb45d8-15f9-47b5-b6af-f578362eda62\") " pod="openstack/keystone-d84968f5f-dw9pq" Oct 03 13:51:15 crc kubenswrapper[4861]: I1003 13:51:15.593483 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/fbeb45d8-15f9-47b5-b6af-f578362eda62-credential-keys\") pod \"keystone-d84968f5f-dw9pq\" (UID: \"fbeb45d8-15f9-47b5-b6af-f578362eda62\") " pod="openstack/keystone-d84968f5f-dw9pq" Oct 03 13:51:15 crc kubenswrapper[4861]: I1003 13:51:15.593508 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/fbeb45d8-15f9-47b5-b6af-f578362eda62-public-tls-certs\") pod \"keystone-d84968f5f-dw9pq\" (UID: 
\"fbeb45d8-15f9-47b5-b6af-f578362eda62\") " pod="openstack/keystone-d84968f5f-dw9pq" Oct 03 13:51:15 crc kubenswrapper[4861]: I1003 13:51:15.593564 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hbhwf\" (UniqueName: \"kubernetes.io/projected/fbeb45d8-15f9-47b5-b6af-f578362eda62-kube-api-access-hbhwf\") pod \"keystone-d84968f5f-dw9pq\" (UID: \"fbeb45d8-15f9-47b5-b6af-f578362eda62\") " pod="openstack/keystone-d84968f5f-dw9pq" Oct 03 13:51:15 crc kubenswrapper[4861]: I1003 13:51:15.593610 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fbeb45d8-15f9-47b5-b6af-f578362eda62-scripts\") pod \"keystone-d84968f5f-dw9pq\" (UID: \"fbeb45d8-15f9-47b5-b6af-f578362eda62\") " pod="openstack/keystone-d84968f5f-dw9pq" Oct 03 13:51:15 crc kubenswrapper[4861]: I1003 13:51:15.675940 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-c6f6cb4f6-xc49q"] Oct 03 13:51:15 crc kubenswrapper[4861]: I1003 13:51:15.679734 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-c6f6cb4f6-xc49q" Oct 03 13:51:15 crc kubenswrapper[4861]: I1003 13:51:15.683580 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Oct 03 13:51:15 crc kubenswrapper[4861]: I1003 13:51:15.684020 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Oct 03 13:51:15 crc kubenswrapper[4861]: I1003 13:51:15.685350 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-internal-svc" Oct 03 13:51:15 crc kubenswrapper[4861]: I1003 13:51:15.685515 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-tnpj6" Oct 03 13:51:15 crc kubenswrapper[4861]: I1003 13:51:15.689396 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-public-svc" Oct 03 13:51:15 crc kubenswrapper[4861]: I1003 13:51:15.695939 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fbeb45d8-15f9-47b5-b6af-f578362eda62-scripts\") pod \"keystone-d84968f5f-dw9pq\" (UID: \"fbeb45d8-15f9-47b5-b6af-f578362eda62\") " pod="openstack/keystone-d84968f5f-dw9pq" Oct 03 13:51:15 crc kubenswrapper[4861]: I1003 13:51:15.696005 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbeb45d8-15f9-47b5-b6af-f578362eda62-combined-ca-bundle\") pod \"keystone-d84968f5f-dw9pq\" (UID: \"fbeb45d8-15f9-47b5-b6af-f578362eda62\") " pod="openstack/keystone-d84968f5f-dw9pq" Oct 03 13:51:15 crc kubenswrapper[4861]: I1003 13:51:15.696032 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/fbeb45d8-15f9-47b5-b6af-f578362eda62-internal-tls-certs\") pod \"keystone-d84968f5f-dw9pq\" (UID: \"fbeb45d8-15f9-47b5-b6af-f578362eda62\") " pod="openstack/keystone-d84968f5f-dw9pq" Oct 03 13:51:15 crc kubenswrapper[4861]: I1003 13:51:15.696053 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/fbeb45d8-15f9-47b5-b6af-f578362eda62-fernet-keys\") pod \"keystone-d84968f5f-dw9pq\" (UID: \"fbeb45d8-15f9-47b5-b6af-f578362eda62\") " 
pod="openstack/keystone-d84968f5f-dw9pq" Oct 03 13:51:15 crc kubenswrapper[4861]: I1003 13:51:15.696089 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fbeb45d8-15f9-47b5-b6af-f578362eda62-config-data\") pod \"keystone-d84968f5f-dw9pq\" (UID: \"fbeb45d8-15f9-47b5-b6af-f578362eda62\") " pod="openstack/keystone-d84968f5f-dw9pq" Oct 03 13:51:15 crc kubenswrapper[4861]: I1003 13:51:15.696133 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/fbeb45d8-15f9-47b5-b6af-f578362eda62-credential-keys\") pod \"keystone-d84968f5f-dw9pq\" (UID: \"fbeb45d8-15f9-47b5-b6af-f578362eda62\") " pod="openstack/keystone-d84968f5f-dw9pq" Oct 03 13:51:15 crc kubenswrapper[4861]: I1003 13:51:15.696150 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/fbeb45d8-15f9-47b5-b6af-f578362eda62-public-tls-certs\") pod \"keystone-d84968f5f-dw9pq\" (UID: \"fbeb45d8-15f9-47b5-b6af-f578362eda62\") " pod="openstack/keystone-d84968f5f-dw9pq" Oct 03 13:51:15 crc kubenswrapper[4861]: I1003 13:51:15.696193 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hbhwf\" (UniqueName: \"kubernetes.io/projected/fbeb45d8-15f9-47b5-b6af-f578362eda62-kube-api-access-hbhwf\") pod \"keystone-d84968f5f-dw9pq\" (UID: \"fbeb45d8-15f9-47b5-b6af-f578362eda62\") " pod="openstack/keystone-d84968f5f-dw9pq" Oct 03 13:51:15 crc kubenswrapper[4861]: I1003 13:51:15.706630 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbeb45d8-15f9-47b5-b6af-f578362eda62-combined-ca-bundle\") pod \"keystone-d84968f5f-dw9pq\" (UID: \"fbeb45d8-15f9-47b5-b6af-f578362eda62\") " pod="openstack/keystone-d84968f5f-dw9pq" Oct 03 13:51:15 crc kubenswrapper[4861]: I1003 13:51:15.725223 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fbeb45d8-15f9-47b5-b6af-f578362eda62-config-data\") pod \"keystone-d84968f5f-dw9pq\" (UID: \"fbeb45d8-15f9-47b5-b6af-f578362eda62\") " pod="openstack/keystone-d84968f5f-dw9pq" Oct 03 13:51:15 crc kubenswrapper[4861]: I1003 13:51:15.727575 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-c6f6cb4f6-xc49q"] Oct 03 13:51:15 crc kubenswrapper[4861]: I1003 13:51:15.728120 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/fbeb45d8-15f9-47b5-b6af-f578362eda62-fernet-keys\") pod \"keystone-d84968f5f-dw9pq\" (UID: \"fbeb45d8-15f9-47b5-b6af-f578362eda62\") " pod="openstack/keystone-d84968f5f-dw9pq" Oct 03 13:51:15 crc kubenswrapper[4861]: I1003 13:51:15.728524 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/fbeb45d8-15f9-47b5-b6af-f578362eda62-credential-keys\") pod \"keystone-d84968f5f-dw9pq\" (UID: \"fbeb45d8-15f9-47b5-b6af-f578362eda62\") " pod="openstack/keystone-d84968f5f-dw9pq" Oct 03 13:51:15 crc kubenswrapper[4861]: I1003 13:51:15.741487 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hbhwf\" (UniqueName: \"kubernetes.io/projected/fbeb45d8-15f9-47b5-b6af-f578362eda62-kube-api-access-hbhwf\") pod \"keystone-d84968f5f-dw9pq\" (UID: \"fbeb45d8-15f9-47b5-b6af-f578362eda62\") " 
pod="openstack/keystone-d84968f5f-dw9pq" Oct 03 13:51:15 crc kubenswrapper[4861]: I1003 13:51:15.743948 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/fbeb45d8-15f9-47b5-b6af-f578362eda62-public-tls-certs\") pod \"keystone-d84968f5f-dw9pq\" (UID: \"fbeb45d8-15f9-47b5-b6af-f578362eda62\") " pod="openstack/keystone-d84968f5f-dw9pq" Oct 03 13:51:15 crc kubenswrapper[4861]: I1003 13:51:15.749010 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/fbeb45d8-15f9-47b5-b6af-f578362eda62-internal-tls-certs\") pod \"keystone-d84968f5f-dw9pq\" (UID: \"fbeb45d8-15f9-47b5-b6af-f578362eda62\") " pod="openstack/keystone-d84968f5f-dw9pq" Oct 03 13:51:15 crc kubenswrapper[4861]: I1003 13:51:15.749600 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fbeb45d8-15f9-47b5-b6af-f578362eda62-scripts\") pod \"keystone-d84968f5f-dw9pq\" (UID: \"fbeb45d8-15f9-47b5-b6af-f578362eda62\") " pod="openstack/keystone-d84968f5f-dw9pq" Oct 03 13:51:15 crc kubenswrapper[4861]: I1003 13:51:15.799344 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/37f329ec-eb69-4d87-a22b-ace765cef57f-public-tls-certs\") pod \"placement-c6f6cb4f6-xc49q\" (UID: \"37f329ec-eb69-4d87-a22b-ace765cef57f\") " pod="openstack/placement-c6f6cb4f6-xc49q" Oct 03 13:51:15 crc kubenswrapper[4861]: I1003 13:51:15.799433 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/37f329ec-eb69-4d87-a22b-ace765cef57f-internal-tls-certs\") pod \"placement-c6f6cb4f6-xc49q\" (UID: \"37f329ec-eb69-4d87-a22b-ace765cef57f\") " pod="openstack/placement-c6f6cb4f6-xc49q" Oct 03 13:51:15 crc kubenswrapper[4861]: I1003 13:51:15.799700 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/37f329ec-eb69-4d87-a22b-ace765cef57f-logs\") pod \"placement-c6f6cb4f6-xc49q\" (UID: \"37f329ec-eb69-4d87-a22b-ace765cef57f\") " pod="openstack/placement-c6f6cb4f6-xc49q" Oct 03 13:51:15 crc kubenswrapper[4861]: I1003 13:51:15.799759 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/37f329ec-eb69-4d87-a22b-ace765cef57f-combined-ca-bundle\") pod \"placement-c6f6cb4f6-xc49q\" (UID: \"37f329ec-eb69-4d87-a22b-ace765cef57f\") " pod="openstack/placement-c6f6cb4f6-xc49q" Oct 03 13:51:15 crc kubenswrapper[4861]: I1003 13:51:15.799826 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/37f329ec-eb69-4d87-a22b-ace765cef57f-config-data\") pod \"placement-c6f6cb4f6-xc49q\" (UID: \"37f329ec-eb69-4d87-a22b-ace765cef57f\") " pod="openstack/placement-c6f6cb4f6-xc49q" Oct 03 13:51:15 crc kubenswrapper[4861]: I1003 13:51:15.800621 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/37f329ec-eb69-4d87-a22b-ace765cef57f-scripts\") pod \"placement-c6f6cb4f6-xc49q\" (UID: \"37f329ec-eb69-4d87-a22b-ace765cef57f\") " pod="openstack/placement-c6f6cb4f6-xc49q" Oct 03 13:51:15 crc 
kubenswrapper[4861]: I1003 13:51:15.800663 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-797zk\" (UniqueName: \"kubernetes.io/projected/37f329ec-eb69-4d87-a22b-ace765cef57f-kube-api-access-797zk\") pod \"placement-c6f6cb4f6-xc49q\" (UID: \"37f329ec-eb69-4d87-a22b-ace765cef57f\") " pod="openstack/placement-c6f6cb4f6-xc49q" Oct 03 13:51:15 crc kubenswrapper[4861]: I1003 13:51:15.807871 4861 scope.go:117] "RemoveContainer" containerID="ea4ebdabcb7ddc81838062d39c51d82ce92031e139e7e6c84bd37400106f46dc" Oct 03 13:51:15 crc kubenswrapper[4861]: I1003 13:51:15.882066 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-76fcf4b695-9ckx2"] Oct 03 13:51:15 crc kubenswrapper[4861]: I1003 13:51:15.893284 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-76fcf4b695-9ckx2"] Oct 03 13:51:15 crc kubenswrapper[4861]: I1003 13:51:15.906981 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/37f329ec-eb69-4d87-a22b-ace765cef57f-logs\") pod \"placement-c6f6cb4f6-xc49q\" (UID: \"37f329ec-eb69-4d87-a22b-ace765cef57f\") " pod="openstack/placement-c6f6cb4f6-xc49q" Oct 03 13:51:15 crc kubenswrapper[4861]: I1003 13:51:15.907048 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/37f329ec-eb69-4d87-a22b-ace765cef57f-combined-ca-bundle\") pod \"placement-c6f6cb4f6-xc49q\" (UID: \"37f329ec-eb69-4d87-a22b-ace765cef57f\") " pod="openstack/placement-c6f6cb4f6-xc49q" Oct 03 13:51:15 crc kubenswrapper[4861]: I1003 13:51:15.907076 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/37f329ec-eb69-4d87-a22b-ace765cef57f-config-data\") pod \"placement-c6f6cb4f6-xc49q\" (UID: \"37f329ec-eb69-4d87-a22b-ace765cef57f\") " pod="openstack/placement-c6f6cb4f6-xc49q" Oct 03 13:51:15 crc kubenswrapper[4861]: I1003 13:51:15.907129 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/37f329ec-eb69-4d87-a22b-ace765cef57f-scripts\") pod \"placement-c6f6cb4f6-xc49q\" (UID: \"37f329ec-eb69-4d87-a22b-ace765cef57f\") " pod="openstack/placement-c6f6cb4f6-xc49q" Oct 03 13:51:15 crc kubenswrapper[4861]: I1003 13:51:15.907157 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-797zk\" (UniqueName: \"kubernetes.io/projected/37f329ec-eb69-4d87-a22b-ace765cef57f-kube-api-access-797zk\") pod \"placement-c6f6cb4f6-xc49q\" (UID: \"37f329ec-eb69-4d87-a22b-ace765cef57f\") " pod="openstack/placement-c6f6cb4f6-xc49q" Oct 03 13:51:15 crc kubenswrapper[4861]: I1003 13:51:15.907320 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/37f329ec-eb69-4d87-a22b-ace765cef57f-public-tls-certs\") pod \"placement-c6f6cb4f6-xc49q\" (UID: \"37f329ec-eb69-4d87-a22b-ace765cef57f\") " pod="openstack/placement-c6f6cb4f6-xc49q" Oct 03 13:51:15 crc kubenswrapper[4861]: I1003 13:51:15.907357 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/37f329ec-eb69-4d87-a22b-ace765cef57f-internal-tls-certs\") pod \"placement-c6f6cb4f6-xc49q\" (UID: \"37f329ec-eb69-4d87-a22b-ace765cef57f\") " 
pod="openstack/placement-c6f6cb4f6-xc49q" Oct 03 13:51:15 crc kubenswrapper[4861]: I1003 13:51:15.912719 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/37f329ec-eb69-4d87-a22b-ace765cef57f-logs\") pod \"placement-c6f6cb4f6-xc49q\" (UID: \"37f329ec-eb69-4d87-a22b-ace765cef57f\") " pod="openstack/placement-c6f6cb4f6-xc49q" Oct 03 13:51:15 crc kubenswrapper[4861]: I1003 13:51:15.929746 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/37f329ec-eb69-4d87-a22b-ace765cef57f-internal-tls-certs\") pod \"placement-c6f6cb4f6-xc49q\" (UID: \"37f329ec-eb69-4d87-a22b-ace765cef57f\") " pod="openstack/placement-c6f6cb4f6-xc49q" Oct 03 13:51:15 crc kubenswrapper[4861]: I1003 13:51:15.932741 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/37f329ec-eb69-4d87-a22b-ace765cef57f-config-data\") pod \"placement-c6f6cb4f6-xc49q\" (UID: \"37f329ec-eb69-4d87-a22b-ace765cef57f\") " pod="openstack/placement-c6f6cb4f6-xc49q" Oct 03 13:51:15 crc kubenswrapper[4861]: I1003 13:51:15.934389 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/37f329ec-eb69-4d87-a22b-ace765cef57f-combined-ca-bundle\") pod \"placement-c6f6cb4f6-xc49q\" (UID: \"37f329ec-eb69-4d87-a22b-ace765cef57f\") " pod="openstack/placement-c6f6cb4f6-xc49q" Oct 03 13:51:15 crc kubenswrapper[4861]: I1003 13:51:15.937169 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/37f329ec-eb69-4d87-a22b-ace765cef57f-public-tls-certs\") pod \"placement-c6f6cb4f6-xc49q\" (UID: \"37f329ec-eb69-4d87-a22b-ace765cef57f\") " pod="openstack/placement-c6f6cb4f6-xc49q" Oct 03 13:51:15 crc kubenswrapper[4861]: I1003 13:51:15.938865 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/37f329ec-eb69-4d87-a22b-ace765cef57f-scripts\") pod \"placement-c6f6cb4f6-xc49q\" (UID: \"37f329ec-eb69-4d87-a22b-ace765cef57f\") " pod="openstack/placement-c6f6cb4f6-xc49q" Oct 03 13:51:15 crc kubenswrapper[4861]: I1003 13:51:15.941261 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-797zk\" (UniqueName: \"kubernetes.io/projected/37f329ec-eb69-4d87-a22b-ace765cef57f-kube-api-access-797zk\") pod \"placement-c6f6cb4f6-xc49q\" (UID: \"37f329ec-eb69-4d87-a22b-ace765cef57f\") " pod="openstack/placement-c6f6cb4f6-xc49q" Oct 03 13:51:16 crc kubenswrapper[4861]: I1003 13:51:16.002940 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-d84968f5f-dw9pq" Oct 03 13:51:16 crc kubenswrapper[4861]: I1003 13:51:16.134722 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-c6f6cb4f6-xc49q" Oct 03 13:51:16 crc kubenswrapper[4861]: I1003 13:51:16.368555 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"e179d8c8-2f07-4b5c-9db5-c26877356f86","Type":"ContainerStarted","Data":"8b5224a7e6863ed712c6aa8d296714089aaa6d9c8931092c695fc9b013d993dc"} Oct 03 13:51:16 crc kubenswrapper[4861]: I1003 13:51:16.403153 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=16.403133288 podStartE2EDuration="16.403133288s" podCreationTimestamp="2025-10-03 13:51:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:51:16.401089684 +0000 UTC m=+1190.399074731" watchObservedRunningTime="2025-10-03 13:51:16.403133288 +0000 UTC m=+1190.401118335" Oct 03 13:51:16 crc kubenswrapper[4861]: I1003 13:51:16.428449 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"b235907f-2bbf-4402-ac15-7c38c44a7461","Type":"ContainerStarted","Data":"344501de3bd1fcb02bbd1237b09f8d94d45792e9b54462601567e5eb4cdab6dd"} Oct 03 13:51:16 crc kubenswrapper[4861]: I1003 13:51:16.452745 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=15.452727425 podStartE2EDuration="15.452727425s" podCreationTimestamp="2025-10-03 13:51:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:51:16.448994285 +0000 UTC m=+1190.446979342" watchObservedRunningTime="2025-10-03 13:51:16.452727425 +0000 UTC m=+1190.450712472" Oct 03 13:51:16 crc kubenswrapper[4861]: I1003 13:51:16.473587 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3413d18c-4d35-4962-bc3f-b6750860b13d","Type":"ContainerStarted","Data":"fa00cbad7504839003d631c483cce5f8ce9c82d27ece44c2bd50e9fa241a8233"} Oct 03 13:51:16 crc kubenswrapper[4861]: I1003 13:51:16.478863 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-ksvcg" event={"ID":"456069ef-db45-4878-85d5-1e5001fa789e","Type":"ContainerStarted","Data":"18445312989afd8d0cf13f473f3e53bf8daf394310804e65cbb55ceef7c0116e"} Oct 03 13:51:16 crc kubenswrapper[4861]: I1003 13:51:16.497511 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-55bc9b5c77-pkdzr" event={"ID":"b539b449-5e16-4bb4-8931-ba6c9ad1df7d","Type":"ContainerStarted","Data":"f956a045c405155f1ebd465888a796332b8e264efb599cd5956390bc97728ef2"} Oct 03 13:51:16 crc kubenswrapper[4861]: I1003 13:51:16.497570 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-55bc9b5c77-pkdzr" event={"ID":"b539b449-5e16-4bb4-8931-ba6c9ad1df7d","Type":"ContainerStarted","Data":"fa5957cc103ba1abc97d2c475206896c565545f04d61aeae04d1b8dd27788bcc"} Oct 03 13:51:16 crc kubenswrapper[4861]: I1003 13:51:16.497585 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-55bc9b5c77-pkdzr" event={"ID":"b539b449-5e16-4bb4-8931-ba6c9ad1df7d","Type":"ContainerStarted","Data":"7f7c14bcabb5e57f54d389da178f4d9db180ac4466e0685826905832c12f9e7e"} Oct 03 13:51:16 crc kubenswrapper[4861]: I1003 13:51:16.498347 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack/neutron-55bc9b5c77-pkdzr" Oct 03 13:51:16 crc kubenswrapper[4861]: I1003 13:51:16.499449 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-ksvcg" podStartSLOduration=4.05394028 podStartE2EDuration="57.499436584s" podCreationTimestamp="2025-10-03 13:50:19 +0000 UTC" firstStartedPulling="2025-10-03 13:50:21.354467979 +0000 UTC m=+1135.352453016" lastFinishedPulling="2025-10-03 13:51:14.799964273 +0000 UTC m=+1188.797949320" observedRunningTime="2025-10-03 13:51:16.495661492 +0000 UTC m=+1190.493646549" watchObservedRunningTime="2025-10-03 13:51:16.499436584 +0000 UTC m=+1190.497421631" Oct 03 13:51:16 crc kubenswrapper[4861]: I1003 13:51:16.534146 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-55bc9b5c77-pkdzr" podStartSLOduration=12.534130311 podStartE2EDuration="12.534130311s" podCreationTimestamp="2025-10-03 13:51:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:51:16.530759732 +0000 UTC m=+1190.528744799" watchObservedRunningTime="2025-10-03 13:51:16.534130311 +0000 UTC m=+1190.532115358" Oct 03 13:51:16 crc kubenswrapper[4861]: I1003 13:51:16.719532 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c53e054d-0317-469b-b94a-1d73bdfc5171" path="/var/lib/kubelet/pods/c53e054d-0317-469b-b94a-1d73bdfc5171/volumes" Oct 03 13:51:16 crc kubenswrapper[4861]: I1003 13:51:16.759262 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-d84968f5f-dw9pq"] Oct 03 13:51:16 crc kubenswrapper[4861]: I1003 13:51:16.863092 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-c6f6cb4f6-xc49q"] Oct 03 13:51:17 crc kubenswrapper[4861]: I1003 13:51:17.521986 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-c6f6cb4f6-xc49q" event={"ID":"37f329ec-eb69-4d87-a22b-ace765cef57f","Type":"ContainerStarted","Data":"012246ddd0310042adcffa9e3b1a08740835850518d7da7fc6dc99ff5f3f7105"} Oct 03 13:51:17 crc kubenswrapper[4861]: I1003 13:51:17.522336 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-c6f6cb4f6-xc49q" event={"ID":"37f329ec-eb69-4d87-a22b-ace765cef57f","Type":"ContainerStarted","Data":"4439f4e3ec72aa29054a6db90d349aeb5ec05f93659e96669f72b7a1c35f9518"} Oct 03 13:51:17 crc kubenswrapper[4861]: I1003 13:51:17.522349 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-c6f6cb4f6-xc49q" event={"ID":"37f329ec-eb69-4d87-a22b-ace765cef57f","Type":"ContainerStarted","Data":"f708cd1038319bec459f5c00f2974c30dcf7f731c869392c218265bd0bc47408"} Oct 03 13:51:17 crc kubenswrapper[4861]: I1003 13:51:17.543590 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-d84968f5f-dw9pq" event={"ID":"fbeb45d8-15f9-47b5-b6af-f578362eda62","Type":"ContainerStarted","Data":"feec201120b67c996457b794b52c88b8a7cd61cb86e5bbd0fbb15eef4776f189"} Oct 03 13:51:17 crc kubenswrapper[4861]: I1003 13:51:17.543669 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-d84968f5f-dw9pq" event={"ID":"fbeb45d8-15f9-47b5-b6af-f578362eda62","Type":"ContainerStarted","Data":"086a1b3e53ef5b144d9f6de1d592af856f3c99814e875c8a17b43ae6c36a5742"} Oct 03 13:51:17 crc kubenswrapper[4861]: I1003 13:51:17.544948 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-d84968f5f-dw9pq" Oct 03 13:51:17 crc 
kubenswrapper[4861]: I1003 13:51:17.573075 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-d84968f5f-dw9pq" podStartSLOduration=2.573039489 podStartE2EDuration="2.573039489s" podCreationTimestamp="2025-10-03 13:51:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:51:17.567520352 +0000 UTC m=+1191.565505399" watchObservedRunningTime="2025-10-03 13:51:17.573039489 +0000 UTC m=+1191.571024536" Oct 03 13:51:18 crc kubenswrapper[4861]: I1003 13:51:18.571617 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-c6f6cb4f6-xc49q" podStartSLOduration=3.571597158 podStartE2EDuration="3.571597158s" podCreationTimestamp="2025-10-03 13:51:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:51:18.567431697 +0000 UTC m=+1192.565416734" watchObservedRunningTime="2025-10-03 13:51:18.571597158 +0000 UTC m=+1192.569582215" Oct 03 13:51:18 crc kubenswrapper[4861]: I1003 13:51:18.597403 4861 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-84cdb7b9dd-jhc2h" podUID="c589e11a-4953-46ec-aeff-a83f6557421f" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.143:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.143:8443: connect: connection refused" Oct 03 13:51:18 crc kubenswrapper[4861]: I1003 13:51:18.720005 4861 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-6c8cb9d9fb-bt6ls" podUID="81ec621b-cc30-4ab2-ae0e-bdd71629009f" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.144:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.144:8443: connect: connection refused" Oct 03 13:51:19 crc kubenswrapper[4861]: I1003 13:51:19.559356 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-c6f6cb4f6-xc49q" Oct 03 13:51:19 crc kubenswrapper[4861]: I1003 13:51:19.559395 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-c6f6cb4f6-xc49q" Oct 03 13:51:21 crc kubenswrapper[4861]: I1003 13:51:21.375603 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Oct 03 13:51:21 crc kubenswrapper[4861]: I1003 13:51:21.382437 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Oct 03 13:51:21 crc kubenswrapper[4861]: I1003 13:51:21.421770 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Oct 03 13:51:21 crc kubenswrapper[4861]: I1003 13:51:21.430850 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Oct 03 13:51:21 crc kubenswrapper[4861]: I1003 13:51:21.576166 4861 generic.go:334] "Generic (PLEG): container finished" podID="3b0f621f-bdf0-4768-a764-0bc15e01faba" containerID="cbeb3106bef3beac18cf97956e4fc960ae1f5b838716bc6b7faee440906ea050" exitCode=0 Oct 03 13:51:21 crc kubenswrapper[4861]: I1003 13:51:21.576260 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-h4wcn" event={"ID":"3b0f621f-bdf0-4768-a764-0bc15e01faba","Type":"ContainerDied","Data":"cbeb3106bef3beac18cf97956e4fc960ae1f5b838716bc6b7faee440906ea050"} Oct 03 13:51:21 
crc kubenswrapper[4861]: I1003 13:51:21.576639 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Oct 03 13:51:21 crc kubenswrapper[4861]: I1003 13:51:21.576800 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Oct 03 13:51:22 crc kubenswrapper[4861]: I1003 13:51:22.366136 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Oct 03 13:51:22 crc kubenswrapper[4861]: I1003 13:51:22.366504 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Oct 03 13:51:22 crc kubenswrapper[4861]: I1003 13:51:22.408836 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Oct 03 13:51:22 crc kubenswrapper[4861]: I1003 13:51:22.436478 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Oct 03 13:51:22 crc kubenswrapper[4861]: I1003 13:51:22.593375 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Oct 03 13:51:22 crc kubenswrapper[4861]: I1003 13:51:22.593576 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Oct 03 13:51:25 crc kubenswrapper[4861]: I1003 13:51:25.018537 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-h4wcn" Oct 03 13:51:25 crc kubenswrapper[4861]: I1003 13:51:25.107380 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b2xpr\" (UniqueName: \"kubernetes.io/projected/3b0f621f-bdf0-4768-a764-0bc15e01faba-kube-api-access-b2xpr\") pod \"3b0f621f-bdf0-4768-a764-0bc15e01faba\" (UID: \"3b0f621f-bdf0-4768-a764-0bc15e01faba\") " Oct 03 13:51:25 crc kubenswrapper[4861]: I1003 13:51:25.107508 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b0f621f-bdf0-4768-a764-0bc15e01faba-combined-ca-bundle\") pod \"3b0f621f-bdf0-4768-a764-0bc15e01faba\" (UID: \"3b0f621f-bdf0-4768-a764-0bc15e01faba\") " Oct 03 13:51:25 crc kubenswrapper[4861]: I1003 13:51:25.107582 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/3b0f621f-bdf0-4768-a764-0bc15e01faba-db-sync-config-data\") pod \"3b0f621f-bdf0-4768-a764-0bc15e01faba\" (UID: \"3b0f621f-bdf0-4768-a764-0bc15e01faba\") " Oct 03 13:51:25 crc kubenswrapper[4861]: I1003 13:51:25.122499 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3b0f621f-bdf0-4768-a764-0bc15e01faba-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "3b0f621f-bdf0-4768-a764-0bc15e01faba" (UID: "3b0f621f-bdf0-4768-a764-0bc15e01faba"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:51:25 crc kubenswrapper[4861]: I1003 13:51:25.129681 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3b0f621f-bdf0-4768-a764-0bc15e01faba-kube-api-access-b2xpr" (OuterVolumeSpecName: "kube-api-access-b2xpr") pod "3b0f621f-bdf0-4768-a764-0bc15e01faba" (UID: "3b0f621f-bdf0-4768-a764-0bc15e01faba"). 
InnerVolumeSpecName "kube-api-access-b2xpr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:51:25 crc kubenswrapper[4861]: I1003 13:51:25.143938 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3b0f621f-bdf0-4768-a764-0bc15e01faba-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3b0f621f-bdf0-4768-a764-0bc15e01faba" (UID: "3b0f621f-bdf0-4768-a764-0bc15e01faba"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:51:25 crc kubenswrapper[4861]: I1003 13:51:25.208730 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b2xpr\" (UniqueName: \"kubernetes.io/projected/3b0f621f-bdf0-4768-a764-0bc15e01faba-kube-api-access-b2xpr\") on node \"crc\" DevicePath \"\"" Oct 03 13:51:25 crc kubenswrapper[4861]: I1003 13:51:25.208777 4861 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b0f621f-bdf0-4768-a764-0bc15e01faba-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 13:51:25 crc kubenswrapper[4861]: I1003 13:51:25.208791 4861 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/3b0f621f-bdf0-4768-a764-0bc15e01faba-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 13:51:25 crc kubenswrapper[4861]: I1003 13:51:25.624687 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-h4wcn" event={"ID":"3b0f621f-bdf0-4768-a764-0bc15e01faba","Type":"ContainerDied","Data":"6766699593e8f28d23efbc3694de8806e938f184a2e191f22dc80e2685b4da50"} Oct 03 13:51:25 crc kubenswrapper[4861]: I1003 13:51:25.624928 4861 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6766699593e8f28d23efbc3694de8806e938f184a2e191f22dc80e2685b4da50" Oct 03 13:51:25 crc kubenswrapper[4861]: I1003 13:51:25.624741 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-h4wcn" Oct 03 13:51:26 crc kubenswrapper[4861]: I1003 13:51:26.031164 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Oct 03 13:51:26 crc kubenswrapper[4861]: I1003 13:51:26.031649 4861 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 03 13:51:26 crc kubenswrapper[4861]: I1003 13:51:26.039380 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Oct 03 13:51:26 crc kubenswrapper[4861]: I1003 13:51:26.043899 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Oct 03 13:51:26 crc kubenswrapper[4861]: I1003 13:51:26.044016 4861 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 03 13:51:26 crc kubenswrapper[4861]: I1003 13:51:26.054617 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Oct 03 13:51:26 crc kubenswrapper[4861]: I1003 13:51:26.366046 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-7b56bc9586-w989t"] Oct 03 13:51:26 crc kubenswrapper[4861]: E1003 13:51:26.366734 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3b0f621f-bdf0-4768-a764-0bc15e01faba" containerName="barbican-db-sync" Oct 03 13:51:26 crc kubenswrapper[4861]: I1003 13:51:26.366752 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="3b0f621f-bdf0-4768-a764-0bc15e01faba" containerName="barbican-db-sync" Oct 03 13:51:26 crc kubenswrapper[4861]: I1003 13:51:26.366932 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="3b0f621f-bdf0-4768-a764-0bc15e01faba" containerName="barbican-db-sync" Oct 03 13:51:26 crc kubenswrapper[4861]: I1003 13:51:26.368530 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-keystone-listener-7b56bc9586-w989t" Oct 03 13:51:26 crc kubenswrapper[4861]: I1003 13:51:26.379858 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-rdm8r" Oct 03 13:51:26 crc kubenswrapper[4861]: I1003 13:51:26.380271 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Oct 03 13:51:26 crc kubenswrapper[4861]: I1003 13:51:26.382543 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data" Oct 03 13:51:26 crc kubenswrapper[4861]: I1003 13:51:26.427710 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fc6983c6-4e21-49b6-a48a-f062bb5afd49-config-data\") pod \"barbican-keystone-listener-7b56bc9586-w989t\" (UID: \"fc6983c6-4e21-49b6-a48a-f062bb5afd49\") " pod="openstack/barbican-keystone-listener-7b56bc9586-w989t" Oct 03 13:51:26 crc kubenswrapper[4861]: I1003 13:51:26.427779 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc6983c6-4e21-49b6-a48a-f062bb5afd49-combined-ca-bundle\") pod \"barbican-keystone-listener-7b56bc9586-w989t\" (UID: \"fc6983c6-4e21-49b6-a48a-f062bb5afd49\") " pod="openstack/barbican-keystone-listener-7b56bc9586-w989t" Oct 03 13:51:26 crc kubenswrapper[4861]: I1003 13:51:26.427817 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8m27q\" (UniqueName: \"kubernetes.io/projected/fc6983c6-4e21-49b6-a48a-f062bb5afd49-kube-api-access-8m27q\") pod \"barbican-keystone-listener-7b56bc9586-w989t\" (UID: \"fc6983c6-4e21-49b6-a48a-f062bb5afd49\") " pod="openstack/barbican-keystone-listener-7b56bc9586-w989t" Oct 03 13:51:26 crc kubenswrapper[4861]: I1003 13:51:26.427848 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fc6983c6-4e21-49b6-a48a-f062bb5afd49-config-data-custom\") pod \"barbican-keystone-listener-7b56bc9586-w989t\" (UID: \"fc6983c6-4e21-49b6-a48a-f062bb5afd49\") " pod="openstack/barbican-keystone-listener-7b56bc9586-w989t" Oct 03 13:51:26 crc kubenswrapper[4861]: I1003 13:51:26.427913 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fc6983c6-4e21-49b6-a48a-f062bb5afd49-logs\") pod \"barbican-keystone-listener-7b56bc9586-w989t\" (UID: \"fc6983c6-4e21-49b6-a48a-f062bb5afd49\") " pod="openstack/barbican-keystone-listener-7b56bc9586-w989t" Oct 03 13:51:26 crc kubenswrapper[4861]: I1003 13:51:26.440294 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-7b56bc9586-w989t"] Oct 03 13:51:26 crc kubenswrapper[4861]: I1003 13:51:26.464535 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-79b8bfc47f-5jchw"] Oct 03 13:51:26 crc kubenswrapper[4861]: I1003 13:51:26.465956 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-worker-79b8bfc47f-5jchw" Oct 03 13:51:26 crc kubenswrapper[4861]: I1003 13:51:26.472137 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data" Oct 03 13:51:26 crc kubenswrapper[4861]: I1003 13:51:26.482927 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-79b8bfc47f-5jchw"] Oct 03 13:51:26 crc kubenswrapper[4861]: I1003 13:51:26.529575 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc6983c6-4e21-49b6-a48a-f062bb5afd49-combined-ca-bundle\") pod \"barbican-keystone-listener-7b56bc9586-w989t\" (UID: \"fc6983c6-4e21-49b6-a48a-f062bb5afd49\") " pod="openstack/barbican-keystone-listener-7b56bc9586-w989t" Oct 03 13:51:26 crc kubenswrapper[4861]: I1003 13:51:26.529662 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8m27q\" (UniqueName: \"kubernetes.io/projected/fc6983c6-4e21-49b6-a48a-f062bb5afd49-kube-api-access-8m27q\") pod \"barbican-keystone-listener-7b56bc9586-w989t\" (UID: \"fc6983c6-4e21-49b6-a48a-f062bb5afd49\") " pod="openstack/barbican-keystone-listener-7b56bc9586-w989t" Oct 03 13:51:26 crc kubenswrapper[4861]: I1003 13:51:26.529713 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fc6983c6-4e21-49b6-a48a-f062bb5afd49-config-data-custom\") pod \"barbican-keystone-listener-7b56bc9586-w989t\" (UID: \"fc6983c6-4e21-49b6-a48a-f062bb5afd49\") " pod="openstack/barbican-keystone-listener-7b56bc9586-w989t" Oct 03 13:51:26 crc kubenswrapper[4861]: I1003 13:51:26.529787 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ba4d9d03-a7d0-46ed-8429-008882213b57-logs\") pod \"barbican-worker-79b8bfc47f-5jchw\" (UID: \"ba4d9d03-a7d0-46ed-8429-008882213b57\") " pod="openstack/barbican-worker-79b8bfc47f-5jchw" Oct 03 13:51:26 crc kubenswrapper[4861]: I1003 13:51:26.529813 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mg52m\" (UniqueName: \"kubernetes.io/projected/ba4d9d03-a7d0-46ed-8429-008882213b57-kube-api-access-mg52m\") pod \"barbican-worker-79b8bfc47f-5jchw\" (UID: \"ba4d9d03-a7d0-46ed-8429-008882213b57\") " pod="openstack/barbican-worker-79b8bfc47f-5jchw" Oct 03 13:51:26 crc kubenswrapper[4861]: I1003 13:51:26.529854 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fc6983c6-4e21-49b6-a48a-f062bb5afd49-logs\") pod \"barbican-keystone-listener-7b56bc9586-w989t\" (UID: \"fc6983c6-4e21-49b6-a48a-f062bb5afd49\") " pod="openstack/barbican-keystone-listener-7b56bc9586-w989t" Oct 03 13:51:26 crc kubenswrapper[4861]: I1003 13:51:26.529898 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fc6983c6-4e21-49b6-a48a-f062bb5afd49-config-data\") pod \"barbican-keystone-listener-7b56bc9586-w989t\" (UID: \"fc6983c6-4e21-49b6-a48a-f062bb5afd49\") " pod="openstack/barbican-keystone-listener-7b56bc9586-w989t" Oct 03 13:51:26 crc kubenswrapper[4861]: I1003 13:51:26.529942 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/ba4d9d03-a7d0-46ed-8429-008882213b57-combined-ca-bundle\") pod \"barbican-worker-79b8bfc47f-5jchw\" (UID: \"ba4d9d03-a7d0-46ed-8429-008882213b57\") " pod="openstack/barbican-worker-79b8bfc47f-5jchw" Oct 03 13:51:26 crc kubenswrapper[4861]: I1003 13:51:26.529961 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ba4d9d03-a7d0-46ed-8429-008882213b57-config-data\") pod \"barbican-worker-79b8bfc47f-5jchw\" (UID: \"ba4d9d03-a7d0-46ed-8429-008882213b57\") " pod="openstack/barbican-worker-79b8bfc47f-5jchw" Oct 03 13:51:26 crc kubenswrapper[4861]: I1003 13:51:26.529988 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ba4d9d03-a7d0-46ed-8429-008882213b57-config-data-custom\") pod \"barbican-worker-79b8bfc47f-5jchw\" (UID: \"ba4d9d03-a7d0-46ed-8429-008882213b57\") " pod="openstack/barbican-worker-79b8bfc47f-5jchw" Oct 03 13:51:26 crc kubenswrapper[4861]: I1003 13:51:26.536912 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-75c8ddd69c-5jkhs"] Oct 03 13:51:26 crc kubenswrapper[4861]: I1003 13:51:26.538841 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fc6983c6-4e21-49b6-a48a-f062bb5afd49-logs\") pod \"barbican-keystone-listener-7b56bc9586-w989t\" (UID: \"fc6983c6-4e21-49b6-a48a-f062bb5afd49\") " pod="openstack/barbican-keystone-listener-7b56bc9586-w989t" Oct 03 13:51:26 crc kubenswrapper[4861]: I1003 13:51:26.550595 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fc6983c6-4e21-49b6-a48a-f062bb5afd49-config-data\") pod \"barbican-keystone-listener-7b56bc9586-w989t\" (UID: \"fc6983c6-4e21-49b6-a48a-f062bb5afd49\") " pod="openstack/barbican-keystone-listener-7b56bc9586-w989t" Oct 03 13:51:26 crc kubenswrapper[4861]: I1003 13:51:26.573805 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-75c8ddd69c-5jkhs" Oct 03 13:51:26 crc kubenswrapper[4861]: I1003 13:51:26.576021 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc6983c6-4e21-49b6-a48a-f062bb5afd49-combined-ca-bundle\") pod \"barbican-keystone-listener-7b56bc9586-w989t\" (UID: \"fc6983c6-4e21-49b6-a48a-f062bb5afd49\") " pod="openstack/barbican-keystone-listener-7b56bc9586-w989t" Oct 03 13:51:26 crc kubenswrapper[4861]: I1003 13:51:26.586852 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fc6983c6-4e21-49b6-a48a-f062bb5afd49-config-data-custom\") pod \"barbican-keystone-listener-7b56bc9586-w989t\" (UID: \"fc6983c6-4e21-49b6-a48a-f062bb5afd49\") " pod="openstack/barbican-keystone-listener-7b56bc9586-w989t" Oct 03 13:51:26 crc kubenswrapper[4861]: I1003 13:51:26.587393 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8m27q\" (UniqueName: \"kubernetes.io/projected/fc6983c6-4e21-49b6-a48a-f062bb5afd49-kube-api-access-8m27q\") pod \"barbican-keystone-listener-7b56bc9586-w989t\" (UID: \"fc6983c6-4e21-49b6-a48a-f062bb5afd49\") " pod="openstack/barbican-keystone-listener-7b56bc9586-w989t" Oct 03 13:51:26 crc kubenswrapper[4861]: I1003 13:51:26.600316 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-75c8ddd69c-5jkhs"] Oct 03 13:51:26 crc kubenswrapper[4861]: I1003 13:51:26.633207 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ba4d9d03-a7d0-46ed-8429-008882213b57-combined-ca-bundle\") pod \"barbican-worker-79b8bfc47f-5jchw\" (UID: \"ba4d9d03-a7d0-46ed-8429-008882213b57\") " pod="openstack/barbican-worker-79b8bfc47f-5jchw" Oct 03 13:51:26 crc kubenswrapper[4861]: I1003 13:51:26.633251 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ba4d9d03-a7d0-46ed-8429-008882213b57-config-data\") pod \"barbican-worker-79b8bfc47f-5jchw\" (UID: \"ba4d9d03-a7d0-46ed-8429-008882213b57\") " pod="openstack/barbican-worker-79b8bfc47f-5jchw" Oct 03 13:51:26 crc kubenswrapper[4861]: I1003 13:51:26.633270 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ba4d9d03-a7d0-46ed-8429-008882213b57-config-data-custom\") pod \"barbican-worker-79b8bfc47f-5jchw\" (UID: \"ba4d9d03-a7d0-46ed-8429-008882213b57\") " pod="openstack/barbican-worker-79b8bfc47f-5jchw" Oct 03 13:51:26 crc kubenswrapper[4861]: I1003 13:51:26.633303 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/70732663-446d-41a5-b0b2-94043cc3c069-ovsdbserver-sb\") pod \"dnsmasq-dns-75c8ddd69c-5jkhs\" (UID: \"70732663-446d-41a5-b0b2-94043cc3c069\") " pod="openstack/dnsmasq-dns-75c8ddd69c-5jkhs" Oct 03 13:51:26 crc kubenswrapper[4861]: I1003 13:51:26.633382 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ba4d9d03-a7d0-46ed-8429-008882213b57-logs\") pod \"barbican-worker-79b8bfc47f-5jchw\" (UID: \"ba4d9d03-a7d0-46ed-8429-008882213b57\") " pod="openstack/barbican-worker-79b8bfc47f-5jchw" Oct 03 13:51:26 crc kubenswrapper[4861]: I1003 13:51:26.633400 4861 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/70732663-446d-41a5-b0b2-94043cc3c069-dns-svc\") pod \"dnsmasq-dns-75c8ddd69c-5jkhs\" (UID: \"70732663-446d-41a5-b0b2-94043cc3c069\") " pod="openstack/dnsmasq-dns-75c8ddd69c-5jkhs" Oct 03 13:51:26 crc kubenswrapper[4861]: I1003 13:51:26.633418 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mg52m\" (UniqueName: \"kubernetes.io/projected/ba4d9d03-a7d0-46ed-8429-008882213b57-kube-api-access-mg52m\") pod \"barbican-worker-79b8bfc47f-5jchw\" (UID: \"ba4d9d03-a7d0-46ed-8429-008882213b57\") " pod="openstack/barbican-worker-79b8bfc47f-5jchw" Oct 03 13:51:26 crc kubenswrapper[4861]: I1003 13:51:26.633435 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/70732663-446d-41a5-b0b2-94043cc3c069-ovsdbserver-nb\") pod \"dnsmasq-dns-75c8ddd69c-5jkhs\" (UID: \"70732663-446d-41a5-b0b2-94043cc3c069\") " pod="openstack/dnsmasq-dns-75c8ddd69c-5jkhs" Oct 03 13:51:26 crc kubenswrapper[4861]: I1003 13:51:26.633467 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/70732663-446d-41a5-b0b2-94043cc3c069-config\") pod \"dnsmasq-dns-75c8ddd69c-5jkhs\" (UID: \"70732663-446d-41a5-b0b2-94043cc3c069\") " pod="openstack/dnsmasq-dns-75c8ddd69c-5jkhs" Oct 03 13:51:26 crc kubenswrapper[4861]: I1003 13:51:26.633489 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/70732663-446d-41a5-b0b2-94043cc3c069-dns-swift-storage-0\") pod \"dnsmasq-dns-75c8ddd69c-5jkhs\" (UID: \"70732663-446d-41a5-b0b2-94043cc3c069\") " pod="openstack/dnsmasq-dns-75c8ddd69c-5jkhs" Oct 03 13:51:26 crc kubenswrapper[4861]: I1003 13:51:26.633521 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-txlmz\" (UniqueName: \"kubernetes.io/projected/70732663-446d-41a5-b0b2-94043cc3c069-kube-api-access-txlmz\") pod \"dnsmasq-dns-75c8ddd69c-5jkhs\" (UID: \"70732663-446d-41a5-b0b2-94043cc3c069\") " pod="openstack/dnsmasq-dns-75c8ddd69c-5jkhs" Oct 03 13:51:26 crc kubenswrapper[4861]: I1003 13:51:26.649090 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ba4d9d03-a7d0-46ed-8429-008882213b57-combined-ca-bundle\") pod \"barbican-worker-79b8bfc47f-5jchw\" (UID: \"ba4d9d03-a7d0-46ed-8429-008882213b57\") " pod="openstack/barbican-worker-79b8bfc47f-5jchw" Oct 03 13:51:26 crc kubenswrapper[4861]: I1003 13:51:26.649382 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ba4d9d03-a7d0-46ed-8429-008882213b57-config-data\") pod \"barbican-worker-79b8bfc47f-5jchw\" (UID: \"ba4d9d03-a7d0-46ed-8429-008882213b57\") " pod="openstack/barbican-worker-79b8bfc47f-5jchw" Oct 03 13:51:26 crc kubenswrapper[4861]: I1003 13:51:26.649907 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ba4d9d03-a7d0-46ed-8429-008882213b57-logs\") pod \"barbican-worker-79b8bfc47f-5jchw\" (UID: \"ba4d9d03-a7d0-46ed-8429-008882213b57\") " pod="openstack/barbican-worker-79b8bfc47f-5jchw" Oct 03 13:51:26 crc 
kubenswrapper[4861]: I1003 13:51:26.655966 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data" Oct 03 13:51:26 crc kubenswrapper[4861]: I1003 13:51:26.664326 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-ksvcg" event={"ID":"456069ef-db45-4878-85d5-1e5001fa789e","Type":"ContainerDied","Data":"18445312989afd8d0cf13f473f3e53bf8daf394310804e65cbb55ceef7c0116e"} Oct 03 13:51:26 crc kubenswrapper[4861]: I1003 13:51:26.665727 4861 generic.go:334] "Generic (PLEG): container finished" podID="456069ef-db45-4878-85d5-1e5001fa789e" containerID="18445312989afd8d0cf13f473f3e53bf8daf394310804e65cbb55ceef7c0116e" exitCode=0 Oct 03 13:51:26 crc kubenswrapper[4861]: I1003 13:51:26.694110 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ba4d9d03-a7d0-46ed-8429-008882213b57-config-data-custom\") pod \"barbican-worker-79b8bfc47f-5jchw\" (UID: \"ba4d9d03-a7d0-46ed-8429-008882213b57\") " pod="openstack/barbican-worker-79b8bfc47f-5jchw" Oct 03 13:51:26 crc kubenswrapper[4861]: I1003 13:51:26.721475 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-rdm8r" Oct 03 13:51:26 crc kubenswrapper[4861]: I1003 13:51:26.737060 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-7b56bc9586-w989t" Oct 03 13:51:26 crc kubenswrapper[4861]: I1003 13:51:26.741459 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mg52m\" (UniqueName: \"kubernetes.io/projected/ba4d9d03-a7d0-46ed-8429-008882213b57-kube-api-access-mg52m\") pod \"barbican-worker-79b8bfc47f-5jchw\" (UID: \"ba4d9d03-a7d0-46ed-8429-008882213b57\") " pod="openstack/barbican-worker-79b8bfc47f-5jchw" Oct 03 13:51:26 crc kubenswrapper[4861]: I1003 13:51:26.781194 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-6cc4d4775d-sr7pz"] Oct 03 13:51:26 crc kubenswrapper[4861]: I1003 13:51:26.788014 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-6cc4d4775d-sr7pz" Oct 03 13:51:26 crc kubenswrapper[4861]: I1003 13:51:26.799306 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data" Oct 03 13:51:26 crc kubenswrapper[4861]: I1003 13:51:26.810295 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/70732663-446d-41a5-b0b2-94043cc3c069-dns-svc\") pod \"dnsmasq-dns-75c8ddd69c-5jkhs\" (UID: \"70732663-446d-41a5-b0b2-94043cc3c069\") " pod="openstack/dnsmasq-dns-75c8ddd69c-5jkhs" Oct 03 13:51:26 crc kubenswrapper[4861]: I1003 13:51:26.810379 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/70732663-446d-41a5-b0b2-94043cc3c069-ovsdbserver-nb\") pod \"dnsmasq-dns-75c8ddd69c-5jkhs\" (UID: \"70732663-446d-41a5-b0b2-94043cc3c069\") " pod="openstack/dnsmasq-dns-75c8ddd69c-5jkhs" Oct 03 13:51:26 crc kubenswrapper[4861]: I1003 13:51:26.810471 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/70732663-446d-41a5-b0b2-94043cc3c069-config\") pod \"dnsmasq-dns-75c8ddd69c-5jkhs\" (UID: \"70732663-446d-41a5-b0b2-94043cc3c069\") " pod="openstack/dnsmasq-dns-75c8ddd69c-5jkhs" Oct 03 13:51:26 crc kubenswrapper[4861]: I1003 13:51:26.810513 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/70732663-446d-41a5-b0b2-94043cc3c069-dns-swift-storage-0\") pod \"dnsmasq-dns-75c8ddd69c-5jkhs\" (UID: \"70732663-446d-41a5-b0b2-94043cc3c069\") " pod="openstack/dnsmasq-dns-75c8ddd69c-5jkhs" Oct 03 13:51:26 crc kubenswrapper[4861]: I1003 13:51:26.810596 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-txlmz\" (UniqueName: \"kubernetes.io/projected/70732663-446d-41a5-b0b2-94043cc3c069-kube-api-access-txlmz\") pod \"dnsmasq-dns-75c8ddd69c-5jkhs\" (UID: \"70732663-446d-41a5-b0b2-94043cc3c069\") " pod="openstack/dnsmasq-dns-75c8ddd69c-5jkhs" Oct 03 13:51:26 crc kubenswrapper[4861]: I1003 13:51:26.810678 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/70732663-446d-41a5-b0b2-94043cc3c069-ovsdbserver-sb\") pod \"dnsmasq-dns-75c8ddd69c-5jkhs\" (UID: \"70732663-446d-41a5-b0b2-94043cc3c069\") " pod="openstack/dnsmasq-dns-75c8ddd69c-5jkhs" Oct 03 13:51:26 crc kubenswrapper[4861]: I1003 13:51:26.813111 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/70732663-446d-41a5-b0b2-94043cc3c069-dns-svc\") pod \"dnsmasq-dns-75c8ddd69c-5jkhs\" (UID: \"70732663-446d-41a5-b0b2-94043cc3c069\") " pod="openstack/dnsmasq-dns-75c8ddd69c-5jkhs" Oct 03 13:51:26 crc kubenswrapper[4861]: I1003 13:51:26.816940 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-6cc4d4775d-sr7pz"] Oct 03 13:51:26 crc kubenswrapper[4861]: I1003 13:51:26.822863 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/70732663-446d-41a5-b0b2-94043cc3c069-config\") pod \"dnsmasq-dns-75c8ddd69c-5jkhs\" (UID: \"70732663-446d-41a5-b0b2-94043cc3c069\") " pod="openstack/dnsmasq-dns-75c8ddd69c-5jkhs" Oct 03 13:51:26 crc kubenswrapper[4861]: I1003 13:51:26.823729 4861 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/70732663-446d-41a5-b0b2-94043cc3c069-dns-swift-storage-0\") pod \"dnsmasq-dns-75c8ddd69c-5jkhs\" (UID: \"70732663-446d-41a5-b0b2-94043cc3c069\") " pod="openstack/dnsmasq-dns-75c8ddd69c-5jkhs" Oct 03 13:51:26 crc kubenswrapper[4861]: I1003 13:51:26.842742 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/70732663-446d-41a5-b0b2-94043cc3c069-ovsdbserver-sb\") pod \"dnsmasq-dns-75c8ddd69c-5jkhs\" (UID: \"70732663-446d-41a5-b0b2-94043cc3c069\") " pod="openstack/dnsmasq-dns-75c8ddd69c-5jkhs" Oct 03 13:51:26 crc kubenswrapper[4861]: I1003 13:51:26.843597 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/70732663-446d-41a5-b0b2-94043cc3c069-ovsdbserver-nb\") pod \"dnsmasq-dns-75c8ddd69c-5jkhs\" (UID: \"70732663-446d-41a5-b0b2-94043cc3c069\") " pod="openstack/dnsmasq-dns-75c8ddd69c-5jkhs" Oct 03 13:51:26 crc kubenswrapper[4861]: I1003 13:51:26.859250 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-79b8bfc47f-5jchw" Oct 03 13:51:26 crc kubenswrapper[4861]: I1003 13:51:26.865563 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-txlmz\" (UniqueName: \"kubernetes.io/projected/70732663-446d-41a5-b0b2-94043cc3c069-kube-api-access-txlmz\") pod \"dnsmasq-dns-75c8ddd69c-5jkhs\" (UID: \"70732663-446d-41a5-b0b2-94043cc3c069\") " pod="openstack/dnsmasq-dns-75c8ddd69c-5jkhs" Oct 03 13:51:26 crc kubenswrapper[4861]: I1003 13:51:26.887275 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-75c8ddd69c-5jkhs" Oct 03 13:51:26 crc kubenswrapper[4861]: I1003 13:51:26.979886 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9ab65d2f-a6e3-477c-81cf-d721fe6eaa9d-config-data-custom\") pod \"barbican-api-6cc4d4775d-sr7pz\" (UID: \"9ab65d2f-a6e3-477c-81cf-d721fe6eaa9d\") " pod="openstack/barbican-api-6cc4d4775d-sr7pz" Oct 03 13:51:26 crc kubenswrapper[4861]: I1003 13:51:26.979928 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9ab65d2f-a6e3-477c-81cf-d721fe6eaa9d-combined-ca-bundle\") pod \"barbican-api-6cc4d4775d-sr7pz\" (UID: \"9ab65d2f-a6e3-477c-81cf-d721fe6eaa9d\") " pod="openstack/barbican-api-6cc4d4775d-sr7pz" Oct 03 13:51:26 crc kubenswrapper[4861]: I1003 13:51:26.979966 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9ab65d2f-a6e3-477c-81cf-d721fe6eaa9d-logs\") pod \"barbican-api-6cc4d4775d-sr7pz\" (UID: \"9ab65d2f-a6e3-477c-81cf-d721fe6eaa9d\") " pod="openstack/barbican-api-6cc4d4775d-sr7pz" Oct 03 13:51:26 crc kubenswrapper[4861]: I1003 13:51:26.979989 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gcc9v\" (UniqueName: \"kubernetes.io/projected/9ab65d2f-a6e3-477c-81cf-d721fe6eaa9d-kube-api-access-gcc9v\") pod \"barbican-api-6cc4d4775d-sr7pz\" (UID: \"9ab65d2f-a6e3-477c-81cf-d721fe6eaa9d\") " pod="openstack/barbican-api-6cc4d4775d-sr7pz" Oct 03 13:51:26 crc kubenswrapper[4861]: I1003 13:51:26.980043 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9ab65d2f-a6e3-477c-81cf-d721fe6eaa9d-config-data\") pod \"barbican-api-6cc4d4775d-sr7pz\" (UID: \"9ab65d2f-a6e3-477c-81cf-d721fe6eaa9d\") " pod="openstack/barbican-api-6cc4d4775d-sr7pz" Oct 03 13:51:27 crc kubenswrapper[4861]: I1003 13:51:27.081573 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9ab65d2f-a6e3-477c-81cf-d721fe6eaa9d-config-data\") pod \"barbican-api-6cc4d4775d-sr7pz\" (UID: \"9ab65d2f-a6e3-477c-81cf-d721fe6eaa9d\") " pod="openstack/barbican-api-6cc4d4775d-sr7pz" Oct 03 13:51:27 crc kubenswrapper[4861]: I1003 13:51:27.081723 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9ab65d2f-a6e3-477c-81cf-d721fe6eaa9d-config-data-custom\") pod \"barbican-api-6cc4d4775d-sr7pz\" (UID: \"9ab65d2f-a6e3-477c-81cf-d721fe6eaa9d\") " pod="openstack/barbican-api-6cc4d4775d-sr7pz" Oct 03 13:51:27 crc kubenswrapper[4861]: I1003 13:51:27.081751 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9ab65d2f-a6e3-477c-81cf-d721fe6eaa9d-combined-ca-bundle\") pod \"barbican-api-6cc4d4775d-sr7pz\" (UID: \"9ab65d2f-a6e3-477c-81cf-d721fe6eaa9d\") " pod="openstack/barbican-api-6cc4d4775d-sr7pz" Oct 03 13:51:27 crc kubenswrapper[4861]: I1003 13:51:27.081787 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9ab65d2f-a6e3-477c-81cf-d721fe6eaa9d-logs\") pod 
\"barbican-api-6cc4d4775d-sr7pz\" (UID: \"9ab65d2f-a6e3-477c-81cf-d721fe6eaa9d\") " pod="openstack/barbican-api-6cc4d4775d-sr7pz" Oct 03 13:51:27 crc kubenswrapper[4861]: I1003 13:51:27.081822 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gcc9v\" (UniqueName: \"kubernetes.io/projected/9ab65d2f-a6e3-477c-81cf-d721fe6eaa9d-kube-api-access-gcc9v\") pod \"barbican-api-6cc4d4775d-sr7pz\" (UID: \"9ab65d2f-a6e3-477c-81cf-d721fe6eaa9d\") " pod="openstack/barbican-api-6cc4d4775d-sr7pz" Oct 03 13:51:27 crc kubenswrapper[4861]: I1003 13:51:27.084131 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9ab65d2f-a6e3-477c-81cf-d721fe6eaa9d-logs\") pod \"barbican-api-6cc4d4775d-sr7pz\" (UID: \"9ab65d2f-a6e3-477c-81cf-d721fe6eaa9d\") " pod="openstack/barbican-api-6cc4d4775d-sr7pz" Oct 03 13:51:27 crc kubenswrapper[4861]: I1003 13:51:27.086713 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9ab65d2f-a6e3-477c-81cf-d721fe6eaa9d-combined-ca-bundle\") pod \"barbican-api-6cc4d4775d-sr7pz\" (UID: \"9ab65d2f-a6e3-477c-81cf-d721fe6eaa9d\") " pod="openstack/barbican-api-6cc4d4775d-sr7pz" Oct 03 13:51:27 crc kubenswrapper[4861]: I1003 13:51:27.090248 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9ab65d2f-a6e3-477c-81cf-d721fe6eaa9d-config-data-custom\") pod \"barbican-api-6cc4d4775d-sr7pz\" (UID: \"9ab65d2f-a6e3-477c-81cf-d721fe6eaa9d\") " pod="openstack/barbican-api-6cc4d4775d-sr7pz" Oct 03 13:51:27 crc kubenswrapper[4861]: I1003 13:51:27.091767 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9ab65d2f-a6e3-477c-81cf-d721fe6eaa9d-config-data\") pod \"barbican-api-6cc4d4775d-sr7pz\" (UID: \"9ab65d2f-a6e3-477c-81cf-d721fe6eaa9d\") " pod="openstack/barbican-api-6cc4d4775d-sr7pz" Oct 03 13:51:27 crc kubenswrapper[4861]: I1003 13:51:27.097730 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gcc9v\" (UniqueName: \"kubernetes.io/projected/9ab65d2f-a6e3-477c-81cf-d721fe6eaa9d-kube-api-access-gcc9v\") pod \"barbican-api-6cc4d4775d-sr7pz\" (UID: \"9ab65d2f-a6e3-477c-81cf-d721fe6eaa9d\") " pod="openstack/barbican-api-6cc4d4775d-sr7pz" Oct 03 13:51:27 crc kubenswrapper[4861]: I1003 13:51:27.176877 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-6cc4d4775d-sr7pz" Oct 03 13:51:28 crc kubenswrapper[4861]: I1003 13:51:28.282074 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-ksvcg" Oct 03 13:51:28 crc kubenswrapper[4861]: I1003 13:51:28.411354 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-79b8bfc47f-5jchw"] Oct 03 13:51:28 crc kubenswrapper[4861]: I1003 13:51:28.419520 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/456069ef-db45-4878-85d5-1e5001fa789e-config-data\") pod \"456069ef-db45-4878-85d5-1e5001fa789e\" (UID: \"456069ef-db45-4878-85d5-1e5001fa789e\") " Oct 03 13:51:28 crc kubenswrapper[4861]: I1003 13:51:28.419722 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/456069ef-db45-4878-85d5-1e5001fa789e-etc-machine-id\") pod \"456069ef-db45-4878-85d5-1e5001fa789e\" (UID: \"456069ef-db45-4878-85d5-1e5001fa789e\") " Oct 03 13:51:28 crc kubenswrapper[4861]: I1003 13:51:28.419755 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/456069ef-db45-4878-85d5-1e5001fa789e-combined-ca-bundle\") pod \"456069ef-db45-4878-85d5-1e5001fa789e\" (UID: \"456069ef-db45-4878-85d5-1e5001fa789e\") " Oct 03 13:51:28 crc kubenswrapper[4861]: I1003 13:51:28.419897 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/456069ef-db45-4878-85d5-1e5001fa789e-db-sync-config-data\") pod \"456069ef-db45-4878-85d5-1e5001fa789e\" (UID: \"456069ef-db45-4878-85d5-1e5001fa789e\") " Oct 03 13:51:28 crc kubenswrapper[4861]: I1003 13:51:28.420044 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fh644\" (UniqueName: \"kubernetes.io/projected/456069ef-db45-4878-85d5-1e5001fa789e-kube-api-access-fh644\") pod \"456069ef-db45-4878-85d5-1e5001fa789e\" (UID: \"456069ef-db45-4878-85d5-1e5001fa789e\") " Oct 03 13:51:28 crc kubenswrapper[4861]: I1003 13:51:28.420541 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/456069ef-db45-4878-85d5-1e5001fa789e-scripts\") pod \"456069ef-db45-4878-85d5-1e5001fa789e\" (UID: \"456069ef-db45-4878-85d5-1e5001fa789e\") " Oct 03 13:51:28 crc kubenswrapper[4861]: I1003 13:51:28.431857 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/456069ef-db45-4878-85d5-1e5001fa789e-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "456069ef-db45-4878-85d5-1e5001fa789e" (UID: "456069ef-db45-4878-85d5-1e5001fa789e"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 03 13:51:28 crc kubenswrapper[4861]: I1003 13:51:28.433004 4861 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/456069ef-db45-4878-85d5-1e5001fa789e-etc-machine-id\") on node \"crc\" DevicePath \"\"" Oct 03 13:51:28 crc kubenswrapper[4861]: I1003 13:51:28.459643 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/456069ef-db45-4878-85d5-1e5001fa789e-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "456069ef-db45-4878-85d5-1e5001fa789e" (UID: "456069ef-db45-4878-85d5-1e5001fa789e"). InnerVolumeSpecName "db-sync-config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:51:28 crc kubenswrapper[4861]: I1003 13:51:28.469136 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/456069ef-db45-4878-85d5-1e5001fa789e-scripts" (OuterVolumeSpecName: "scripts") pod "456069ef-db45-4878-85d5-1e5001fa789e" (UID: "456069ef-db45-4878-85d5-1e5001fa789e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:51:28 crc kubenswrapper[4861]: I1003 13:51:28.469617 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/456069ef-db45-4878-85d5-1e5001fa789e-kube-api-access-fh644" (OuterVolumeSpecName: "kube-api-access-fh644") pod "456069ef-db45-4878-85d5-1e5001fa789e" (UID: "456069ef-db45-4878-85d5-1e5001fa789e"). InnerVolumeSpecName "kube-api-access-fh644". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:51:28 crc kubenswrapper[4861]: I1003 13:51:28.497629 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/456069ef-db45-4878-85d5-1e5001fa789e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "456069ef-db45-4878-85d5-1e5001fa789e" (UID: "456069ef-db45-4878-85d5-1e5001fa789e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:51:28 crc kubenswrapper[4861]: I1003 13:51:28.535171 4861 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/456069ef-db45-4878-85d5-1e5001fa789e-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 13:51:28 crc kubenswrapper[4861]: I1003 13:51:28.535391 4861 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/456069ef-db45-4878-85d5-1e5001fa789e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 13:51:28 crc kubenswrapper[4861]: I1003 13:51:28.535458 4861 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/456069ef-db45-4878-85d5-1e5001fa789e-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 13:51:28 crc kubenswrapper[4861]: I1003 13:51:28.535543 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fh644\" (UniqueName: \"kubernetes.io/projected/456069ef-db45-4878-85d5-1e5001fa789e-kube-api-access-fh644\") on node \"crc\" DevicePath \"\"" Oct 03 13:51:28 crc kubenswrapper[4861]: I1003 13:51:28.561389 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/456069ef-db45-4878-85d5-1e5001fa789e-config-data" (OuterVolumeSpecName: "config-data") pod "456069ef-db45-4878-85d5-1e5001fa789e" (UID: "456069ef-db45-4878-85d5-1e5001fa789e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:51:28 crc kubenswrapper[4861]: I1003 13:51:28.639122 4861 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/456069ef-db45-4878-85d5-1e5001fa789e-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 13:51:28 crc kubenswrapper[4861]: I1003 13:51:28.716318 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-ksvcg" Oct 03 13:51:28 crc kubenswrapper[4861]: I1003 13:51:28.739012 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3413d18c-4d35-4962-bc3f-b6750860b13d","Type":"ContainerStarted","Data":"3d0b6ce35a3cfc12628c6c8d1c1b0935696ab629cd75f33401901447c089dc5a"} Oct 03 13:51:28 crc kubenswrapper[4861]: I1003 13:51:28.739048 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-79b8bfc47f-5jchw" event={"ID":"ba4d9d03-a7d0-46ed-8429-008882213b57","Type":"ContainerStarted","Data":"ff4f6d68acc49ab29fc3ae92c95981d80b7ead4717df7a63c6ca45217f942100"} Oct 03 13:51:28 crc kubenswrapper[4861]: I1003 13:51:28.739062 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-ksvcg" event={"ID":"456069ef-db45-4878-85d5-1e5001fa789e","Type":"ContainerDied","Data":"aeffa5a6f229c56885e9ed92ac1d4d72a4de1091e1d9a0e53f57b0e6da40df22"} Oct 03 13:51:28 crc kubenswrapper[4861]: I1003 13:51:28.739072 4861 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="aeffa5a6f229c56885e9ed92ac1d4d72a4de1091e1d9a0e53f57b0e6da40df22" Oct 03 13:51:28 crc kubenswrapper[4861]: I1003 13:51:28.744533 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-75c8ddd69c-5jkhs"] Oct 03 13:51:28 crc kubenswrapper[4861]: I1003 13:51:28.791387 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-7b56bc9586-w989t"] Oct 03 13:51:28 crc kubenswrapper[4861]: I1003 13:51:28.884456 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-6cc4d4775d-sr7pz"] Oct 03 13:51:29 crc kubenswrapper[4861]: I1003 13:51:29.099006 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Oct 03 13:51:29 crc kubenswrapper[4861]: E1003 13:51:29.099420 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="456069ef-db45-4878-85d5-1e5001fa789e" containerName="cinder-db-sync" Oct 03 13:51:29 crc kubenswrapper[4861]: I1003 13:51:29.099441 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="456069ef-db45-4878-85d5-1e5001fa789e" containerName="cinder-db-sync" Oct 03 13:51:29 crc kubenswrapper[4861]: I1003 13:51:29.099623 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="456069ef-db45-4878-85d5-1e5001fa789e" containerName="cinder-db-sync" Oct 03 13:51:29 crc kubenswrapper[4861]: I1003 13:51:29.100640 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 03 13:51:29 crc kubenswrapper[4861]: I1003 13:51:29.106354 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Oct 03 13:51:29 crc kubenswrapper[4861]: I1003 13:51:29.106642 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Oct 03 13:51:29 crc kubenswrapper[4861]: I1003 13:51:29.106842 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Oct 03 13:51:29 crc kubenswrapper[4861]: I1003 13:51:29.106949 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-zg4gc" Oct 03 13:51:29 crc kubenswrapper[4861]: I1003 13:51:29.136856 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 03 13:51:29 crc kubenswrapper[4861]: I1003 13:51:29.283729 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4b8c0315-b124-4017-a6d1-20d73208248d-config-data\") pod \"cinder-scheduler-0\" (UID: \"4b8c0315-b124-4017-a6d1-20d73208248d\") " pod="openstack/cinder-scheduler-0" Oct 03 13:51:29 crc kubenswrapper[4861]: I1003 13:51:29.283802 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4b8c0315-b124-4017-a6d1-20d73208248d-scripts\") pod \"cinder-scheduler-0\" (UID: \"4b8c0315-b124-4017-a6d1-20d73208248d\") " pod="openstack/cinder-scheduler-0" Oct 03 13:51:29 crc kubenswrapper[4861]: I1003 13:51:29.283832 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/4b8c0315-b124-4017-a6d1-20d73208248d-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"4b8c0315-b124-4017-a6d1-20d73208248d\") " pod="openstack/cinder-scheduler-0" Oct 03 13:51:29 crc kubenswrapper[4861]: I1003 13:51:29.283855 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4b8c0315-b124-4017-a6d1-20d73208248d-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"4b8c0315-b124-4017-a6d1-20d73208248d\") " pod="openstack/cinder-scheduler-0" Oct 03 13:51:29 crc kubenswrapper[4861]: I1003 13:51:29.283879 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4b8c0315-b124-4017-a6d1-20d73208248d-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"4b8c0315-b124-4017-a6d1-20d73208248d\") " pod="openstack/cinder-scheduler-0" Oct 03 13:51:29 crc kubenswrapper[4861]: I1003 13:51:29.283920 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6dzw4\" (UniqueName: \"kubernetes.io/projected/4b8c0315-b124-4017-a6d1-20d73208248d-kube-api-access-6dzw4\") pod \"cinder-scheduler-0\" (UID: \"4b8c0315-b124-4017-a6d1-20d73208248d\") " pod="openstack/cinder-scheduler-0" Oct 03 13:51:29 crc kubenswrapper[4861]: I1003 13:51:29.292831 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-75c8ddd69c-5jkhs"] Oct 03 13:51:29 crc kubenswrapper[4861]: I1003 13:51:29.365178 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5784cf869f-gd9mp"] Oct 03 13:51:29 crc 
kubenswrapper[4861]: I1003 13:51:29.375372 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5784cf869f-gd9mp" Oct 03 13:51:29 crc kubenswrapper[4861]: I1003 13:51:29.385586 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4b8c0315-b124-4017-a6d1-20d73208248d-config-data\") pod \"cinder-scheduler-0\" (UID: \"4b8c0315-b124-4017-a6d1-20d73208248d\") " pod="openstack/cinder-scheduler-0" Oct 03 13:51:29 crc kubenswrapper[4861]: I1003 13:51:29.385655 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4b8c0315-b124-4017-a6d1-20d73208248d-scripts\") pod \"cinder-scheduler-0\" (UID: \"4b8c0315-b124-4017-a6d1-20d73208248d\") " pod="openstack/cinder-scheduler-0" Oct 03 13:51:29 crc kubenswrapper[4861]: I1003 13:51:29.385693 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/4b8c0315-b124-4017-a6d1-20d73208248d-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"4b8c0315-b124-4017-a6d1-20d73208248d\") " pod="openstack/cinder-scheduler-0" Oct 03 13:51:29 crc kubenswrapper[4861]: I1003 13:51:29.385724 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4b8c0315-b124-4017-a6d1-20d73208248d-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"4b8c0315-b124-4017-a6d1-20d73208248d\") " pod="openstack/cinder-scheduler-0" Oct 03 13:51:29 crc kubenswrapper[4861]: I1003 13:51:29.385761 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4b8c0315-b124-4017-a6d1-20d73208248d-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"4b8c0315-b124-4017-a6d1-20d73208248d\") " pod="openstack/cinder-scheduler-0" Oct 03 13:51:29 crc kubenswrapper[4861]: I1003 13:51:29.385818 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6dzw4\" (UniqueName: \"kubernetes.io/projected/4b8c0315-b124-4017-a6d1-20d73208248d-kube-api-access-6dzw4\") pod \"cinder-scheduler-0\" (UID: \"4b8c0315-b124-4017-a6d1-20d73208248d\") " pod="openstack/cinder-scheduler-0" Oct 03 13:51:29 crc kubenswrapper[4861]: I1003 13:51:29.386614 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/4b8c0315-b124-4017-a6d1-20d73208248d-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"4b8c0315-b124-4017-a6d1-20d73208248d\") " pod="openstack/cinder-scheduler-0" Oct 03 13:51:29 crc kubenswrapper[4861]: I1003 13:51:29.399727 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4b8c0315-b124-4017-a6d1-20d73208248d-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"4b8c0315-b124-4017-a6d1-20d73208248d\") " pod="openstack/cinder-scheduler-0" Oct 03 13:51:29 crc kubenswrapper[4861]: I1003 13:51:29.400995 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4b8c0315-b124-4017-a6d1-20d73208248d-scripts\") pod \"cinder-scheduler-0\" (UID: \"4b8c0315-b124-4017-a6d1-20d73208248d\") " pod="openstack/cinder-scheduler-0" Oct 03 13:51:29 crc kubenswrapper[4861]: I1003 13:51:29.404876 4861 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4b8c0315-b124-4017-a6d1-20d73208248d-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"4b8c0315-b124-4017-a6d1-20d73208248d\") " pod="openstack/cinder-scheduler-0" Oct 03 13:51:29 crc kubenswrapper[4861]: I1003 13:51:29.407786 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4b8c0315-b124-4017-a6d1-20d73208248d-config-data\") pod \"cinder-scheduler-0\" (UID: \"4b8c0315-b124-4017-a6d1-20d73208248d\") " pod="openstack/cinder-scheduler-0" Oct 03 13:51:29 crc kubenswrapper[4861]: I1003 13:51:29.431215 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6dzw4\" (UniqueName: \"kubernetes.io/projected/4b8c0315-b124-4017-a6d1-20d73208248d-kube-api-access-6dzw4\") pod \"cinder-scheduler-0\" (UID: \"4b8c0315-b124-4017-a6d1-20d73208248d\") " pod="openstack/cinder-scheduler-0" Oct 03 13:51:29 crc kubenswrapper[4861]: I1003 13:51:29.438625 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 03 13:51:29 crc kubenswrapper[4861]: I1003 13:51:29.467740 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5784cf869f-gd9mp"] Oct 03 13:51:29 crc kubenswrapper[4861]: I1003 13:51:29.485814 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Oct 03 13:51:29 crc kubenswrapper[4861]: I1003 13:51:29.494758 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Oct 03 13:51:29 crc kubenswrapper[4861]: I1003 13:51:29.499591 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Oct 03 13:51:29 crc kubenswrapper[4861]: I1003 13:51:29.504899 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/709ebd79-e1bf-4e30-ba04-d1dc2c954398-ovsdbserver-sb\") pod \"dnsmasq-dns-5784cf869f-gd9mp\" (UID: \"709ebd79-e1bf-4e30-ba04-d1dc2c954398\") " pod="openstack/dnsmasq-dns-5784cf869f-gd9mp" Oct 03 13:51:29 crc kubenswrapper[4861]: I1003 13:51:29.504959 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/709ebd79-e1bf-4e30-ba04-d1dc2c954398-ovsdbserver-nb\") pod \"dnsmasq-dns-5784cf869f-gd9mp\" (UID: \"709ebd79-e1bf-4e30-ba04-d1dc2c954398\") " pod="openstack/dnsmasq-dns-5784cf869f-gd9mp" Oct 03 13:51:29 crc kubenswrapper[4861]: I1003 13:51:29.505037 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/709ebd79-e1bf-4e30-ba04-d1dc2c954398-config\") pod \"dnsmasq-dns-5784cf869f-gd9mp\" (UID: \"709ebd79-e1bf-4e30-ba04-d1dc2c954398\") " pod="openstack/dnsmasq-dns-5784cf869f-gd9mp" Oct 03 13:51:29 crc kubenswrapper[4861]: I1003 13:51:29.505118 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pl2sm\" (UniqueName: \"kubernetes.io/projected/709ebd79-e1bf-4e30-ba04-d1dc2c954398-kube-api-access-pl2sm\") pod \"dnsmasq-dns-5784cf869f-gd9mp\" (UID: \"709ebd79-e1bf-4e30-ba04-d1dc2c954398\") " pod="openstack/dnsmasq-dns-5784cf869f-gd9mp" Oct 03 13:51:29 crc kubenswrapper[4861]: I1003 13:51:29.505276 4861 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/709ebd79-e1bf-4e30-ba04-d1dc2c954398-dns-svc\") pod \"dnsmasq-dns-5784cf869f-gd9mp\" (UID: \"709ebd79-e1bf-4e30-ba04-d1dc2c954398\") " pod="openstack/dnsmasq-dns-5784cf869f-gd9mp" Oct 03 13:51:29 crc kubenswrapper[4861]: I1003 13:51:29.505306 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/709ebd79-e1bf-4e30-ba04-d1dc2c954398-dns-swift-storage-0\") pod \"dnsmasq-dns-5784cf869f-gd9mp\" (UID: \"709ebd79-e1bf-4e30-ba04-d1dc2c954398\") " pod="openstack/dnsmasq-dns-5784cf869f-gd9mp" Oct 03 13:51:29 crc kubenswrapper[4861]: I1003 13:51:29.559262 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Oct 03 13:51:29 crc kubenswrapper[4861]: I1003 13:51:29.606676 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/709ebd79-e1bf-4e30-ba04-d1dc2c954398-config\") pod \"dnsmasq-dns-5784cf869f-gd9mp\" (UID: \"709ebd79-e1bf-4e30-ba04-d1dc2c954398\") " pod="openstack/dnsmasq-dns-5784cf869f-gd9mp" Oct 03 13:51:29 crc kubenswrapper[4861]: I1003 13:51:29.606724 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8bbb0cd4-37cc-4039-a13f-55dbeb56c336-config-data\") pod \"cinder-api-0\" (UID: \"8bbb0cd4-37cc-4039-a13f-55dbeb56c336\") " pod="openstack/cinder-api-0" Oct 03 13:51:29 crc kubenswrapper[4861]: I1003 13:51:29.606761 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rs58v\" (UniqueName: \"kubernetes.io/projected/8bbb0cd4-37cc-4039-a13f-55dbeb56c336-kube-api-access-rs58v\") pod \"cinder-api-0\" (UID: \"8bbb0cd4-37cc-4039-a13f-55dbeb56c336\") " pod="openstack/cinder-api-0" Oct 03 13:51:29 crc kubenswrapper[4861]: I1003 13:51:29.606791 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pl2sm\" (UniqueName: \"kubernetes.io/projected/709ebd79-e1bf-4e30-ba04-d1dc2c954398-kube-api-access-pl2sm\") pod \"dnsmasq-dns-5784cf869f-gd9mp\" (UID: \"709ebd79-e1bf-4e30-ba04-d1dc2c954398\") " pod="openstack/dnsmasq-dns-5784cf869f-gd9mp" Oct 03 13:51:29 crc kubenswrapper[4861]: I1003 13:51:29.606824 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8bbb0cd4-37cc-4039-a13f-55dbeb56c336-logs\") pod \"cinder-api-0\" (UID: \"8bbb0cd4-37cc-4039-a13f-55dbeb56c336\") " pod="openstack/cinder-api-0" Oct 03 13:51:29 crc kubenswrapper[4861]: I1003 13:51:29.606857 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8bbb0cd4-37cc-4039-a13f-55dbeb56c336-config-data-custom\") pod \"cinder-api-0\" (UID: \"8bbb0cd4-37cc-4039-a13f-55dbeb56c336\") " pod="openstack/cinder-api-0" Oct 03 13:51:29 crc kubenswrapper[4861]: I1003 13:51:29.606886 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/709ebd79-e1bf-4e30-ba04-d1dc2c954398-dns-svc\") pod \"dnsmasq-dns-5784cf869f-gd9mp\" (UID: \"709ebd79-e1bf-4e30-ba04-d1dc2c954398\") " pod="openstack/dnsmasq-dns-5784cf869f-gd9mp" Oct 03 13:51:29 crc kubenswrapper[4861]: 
I1003 13:51:29.606985 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8bbb0cd4-37cc-4039-a13f-55dbeb56c336-etc-machine-id\") pod \"cinder-api-0\" (UID: \"8bbb0cd4-37cc-4039-a13f-55dbeb56c336\") " pod="openstack/cinder-api-0" Oct 03 13:51:29 crc kubenswrapper[4861]: I1003 13:51:29.607034 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/709ebd79-e1bf-4e30-ba04-d1dc2c954398-dns-swift-storage-0\") pod \"dnsmasq-dns-5784cf869f-gd9mp\" (UID: \"709ebd79-e1bf-4e30-ba04-d1dc2c954398\") " pod="openstack/dnsmasq-dns-5784cf869f-gd9mp" Oct 03 13:51:29 crc kubenswrapper[4861]: I1003 13:51:29.607138 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8bbb0cd4-37cc-4039-a13f-55dbeb56c336-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"8bbb0cd4-37cc-4039-a13f-55dbeb56c336\") " pod="openstack/cinder-api-0" Oct 03 13:51:29 crc kubenswrapper[4861]: I1003 13:51:29.607165 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8bbb0cd4-37cc-4039-a13f-55dbeb56c336-scripts\") pod \"cinder-api-0\" (UID: \"8bbb0cd4-37cc-4039-a13f-55dbeb56c336\") " pod="openstack/cinder-api-0" Oct 03 13:51:29 crc kubenswrapper[4861]: I1003 13:51:29.607197 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/709ebd79-e1bf-4e30-ba04-d1dc2c954398-ovsdbserver-sb\") pod \"dnsmasq-dns-5784cf869f-gd9mp\" (UID: \"709ebd79-e1bf-4e30-ba04-d1dc2c954398\") " pod="openstack/dnsmasq-dns-5784cf869f-gd9mp" Oct 03 13:51:29 crc kubenswrapper[4861]: I1003 13:51:29.607223 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/709ebd79-e1bf-4e30-ba04-d1dc2c954398-ovsdbserver-nb\") pod \"dnsmasq-dns-5784cf869f-gd9mp\" (UID: \"709ebd79-e1bf-4e30-ba04-d1dc2c954398\") " pod="openstack/dnsmasq-dns-5784cf869f-gd9mp" Oct 03 13:51:29 crc kubenswrapper[4861]: I1003 13:51:29.607700 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/709ebd79-e1bf-4e30-ba04-d1dc2c954398-dns-svc\") pod \"dnsmasq-dns-5784cf869f-gd9mp\" (UID: \"709ebd79-e1bf-4e30-ba04-d1dc2c954398\") " pod="openstack/dnsmasq-dns-5784cf869f-gd9mp" Oct 03 13:51:29 crc kubenswrapper[4861]: I1003 13:51:29.608164 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/709ebd79-e1bf-4e30-ba04-d1dc2c954398-ovsdbserver-nb\") pod \"dnsmasq-dns-5784cf869f-gd9mp\" (UID: \"709ebd79-e1bf-4e30-ba04-d1dc2c954398\") " pod="openstack/dnsmasq-dns-5784cf869f-gd9mp" Oct 03 13:51:29 crc kubenswrapper[4861]: I1003 13:51:29.608728 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/709ebd79-e1bf-4e30-ba04-d1dc2c954398-config\") pod \"dnsmasq-dns-5784cf869f-gd9mp\" (UID: \"709ebd79-e1bf-4e30-ba04-d1dc2c954398\") " pod="openstack/dnsmasq-dns-5784cf869f-gd9mp" Oct 03 13:51:29 crc kubenswrapper[4861]: I1003 13:51:29.609091 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: 
\"kubernetes.io/configmap/709ebd79-e1bf-4e30-ba04-d1dc2c954398-dns-swift-storage-0\") pod \"dnsmasq-dns-5784cf869f-gd9mp\" (UID: \"709ebd79-e1bf-4e30-ba04-d1dc2c954398\") " pod="openstack/dnsmasq-dns-5784cf869f-gd9mp" Oct 03 13:51:29 crc kubenswrapper[4861]: I1003 13:51:29.609486 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/709ebd79-e1bf-4e30-ba04-d1dc2c954398-ovsdbserver-sb\") pod \"dnsmasq-dns-5784cf869f-gd9mp\" (UID: \"709ebd79-e1bf-4e30-ba04-d1dc2c954398\") " pod="openstack/dnsmasq-dns-5784cf869f-gd9mp" Oct 03 13:51:29 crc kubenswrapper[4861]: I1003 13:51:29.635659 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pl2sm\" (UniqueName: \"kubernetes.io/projected/709ebd79-e1bf-4e30-ba04-d1dc2c954398-kube-api-access-pl2sm\") pod \"dnsmasq-dns-5784cf869f-gd9mp\" (UID: \"709ebd79-e1bf-4e30-ba04-d1dc2c954398\") " pod="openstack/dnsmasq-dns-5784cf869f-gd9mp" Oct 03 13:51:29 crc kubenswrapper[4861]: I1003 13:51:29.709691 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8bbb0cd4-37cc-4039-a13f-55dbeb56c336-logs\") pod \"cinder-api-0\" (UID: \"8bbb0cd4-37cc-4039-a13f-55dbeb56c336\") " pod="openstack/cinder-api-0" Oct 03 13:51:29 crc kubenswrapper[4861]: I1003 13:51:29.709949 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8bbb0cd4-37cc-4039-a13f-55dbeb56c336-config-data-custom\") pod \"cinder-api-0\" (UID: \"8bbb0cd4-37cc-4039-a13f-55dbeb56c336\") " pod="openstack/cinder-api-0" Oct 03 13:51:29 crc kubenswrapper[4861]: I1003 13:51:29.710033 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8bbb0cd4-37cc-4039-a13f-55dbeb56c336-etc-machine-id\") pod \"cinder-api-0\" (UID: \"8bbb0cd4-37cc-4039-a13f-55dbeb56c336\") " pod="openstack/cinder-api-0" Oct 03 13:51:29 crc kubenswrapper[4861]: I1003 13:51:29.710089 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8bbb0cd4-37cc-4039-a13f-55dbeb56c336-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"8bbb0cd4-37cc-4039-a13f-55dbeb56c336\") " pod="openstack/cinder-api-0" Oct 03 13:51:29 crc kubenswrapper[4861]: I1003 13:51:29.710170 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8bbb0cd4-37cc-4039-a13f-55dbeb56c336-scripts\") pod \"cinder-api-0\" (UID: \"8bbb0cd4-37cc-4039-a13f-55dbeb56c336\") " pod="openstack/cinder-api-0" Oct 03 13:51:29 crc kubenswrapper[4861]: I1003 13:51:29.710240 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8bbb0cd4-37cc-4039-a13f-55dbeb56c336-config-data\") pod \"cinder-api-0\" (UID: \"8bbb0cd4-37cc-4039-a13f-55dbeb56c336\") " pod="openstack/cinder-api-0" Oct 03 13:51:29 crc kubenswrapper[4861]: I1003 13:51:29.710278 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rs58v\" (UniqueName: \"kubernetes.io/projected/8bbb0cd4-37cc-4039-a13f-55dbeb56c336-kube-api-access-rs58v\") pod \"cinder-api-0\" (UID: \"8bbb0cd4-37cc-4039-a13f-55dbeb56c336\") " pod="openstack/cinder-api-0" Oct 03 13:51:29 crc kubenswrapper[4861]: I1003 13:51:29.710398 4861 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8bbb0cd4-37cc-4039-a13f-55dbeb56c336-logs\") pod \"cinder-api-0\" (UID: \"8bbb0cd4-37cc-4039-a13f-55dbeb56c336\") " pod="openstack/cinder-api-0" Oct 03 13:51:29 crc kubenswrapper[4861]: I1003 13:51:29.710720 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8bbb0cd4-37cc-4039-a13f-55dbeb56c336-etc-machine-id\") pod \"cinder-api-0\" (UID: \"8bbb0cd4-37cc-4039-a13f-55dbeb56c336\") " pod="openstack/cinder-api-0" Oct 03 13:51:29 crc kubenswrapper[4861]: I1003 13:51:29.720260 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8bbb0cd4-37cc-4039-a13f-55dbeb56c336-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"8bbb0cd4-37cc-4039-a13f-55dbeb56c336\") " pod="openstack/cinder-api-0" Oct 03 13:51:29 crc kubenswrapper[4861]: I1003 13:51:29.721513 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8bbb0cd4-37cc-4039-a13f-55dbeb56c336-config-data\") pod \"cinder-api-0\" (UID: \"8bbb0cd4-37cc-4039-a13f-55dbeb56c336\") " pod="openstack/cinder-api-0" Oct 03 13:51:29 crc kubenswrapper[4861]: I1003 13:51:29.735721 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8bbb0cd4-37cc-4039-a13f-55dbeb56c336-scripts\") pod \"cinder-api-0\" (UID: \"8bbb0cd4-37cc-4039-a13f-55dbeb56c336\") " pod="openstack/cinder-api-0" Oct 03 13:51:29 crc kubenswrapper[4861]: I1003 13:51:29.747819 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8bbb0cd4-37cc-4039-a13f-55dbeb56c336-config-data-custom\") pod \"cinder-api-0\" (UID: \"8bbb0cd4-37cc-4039-a13f-55dbeb56c336\") " pod="openstack/cinder-api-0" Oct 03 13:51:29 crc kubenswrapper[4861]: I1003 13:51:29.756490 4861 generic.go:334] "Generic (PLEG): container finished" podID="70732663-446d-41a5-b0b2-94043cc3c069" containerID="77009d3c3d9557835b0b4bf17a9edba7859154e4b77971a0627c3865f7760d03" exitCode=0 Oct 03 13:51:29 crc kubenswrapper[4861]: I1003 13:51:29.756582 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75c8ddd69c-5jkhs" event={"ID":"70732663-446d-41a5-b0b2-94043cc3c069","Type":"ContainerDied","Data":"77009d3c3d9557835b0b4bf17a9edba7859154e4b77971a0627c3865f7760d03"} Oct 03 13:51:29 crc kubenswrapper[4861]: I1003 13:51:29.756613 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75c8ddd69c-5jkhs" event={"ID":"70732663-446d-41a5-b0b2-94043cc3c069","Type":"ContainerStarted","Data":"994644a1666948879834d3bca63973b66bc2b51c599ff5c4364d7c897d216c0e"} Oct 03 13:51:29 crc kubenswrapper[4861]: I1003 13:51:29.759201 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5784cf869f-gd9mp" Oct 03 13:51:29 crc kubenswrapper[4861]: I1003 13:51:29.779320 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rs58v\" (UniqueName: \"kubernetes.io/projected/8bbb0cd4-37cc-4039-a13f-55dbeb56c336-kube-api-access-rs58v\") pod \"cinder-api-0\" (UID: \"8bbb0cd4-37cc-4039-a13f-55dbeb56c336\") " pod="openstack/cinder-api-0" Oct 03 13:51:29 crc kubenswrapper[4861]: I1003 13:51:29.800973 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6cc4d4775d-sr7pz" event={"ID":"9ab65d2f-a6e3-477c-81cf-d721fe6eaa9d","Type":"ContainerStarted","Data":"31ca449731603a214dbde82a1583c8a6b51464a5072f75455b995c003de343ed"} Oct 03 13:51:29 crc kubenswrapper[4861]: I1003 13:51:29.801020 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6cc4d4775d-sr7pz" event={"ID":"9ab65d2f-a6e3-477c-81cf-d721fe6eaa9d","Type":"ContainerStarted","Data":"3112e3f43b3fd5bf0553e579347eb0905f1a51671daa2702bda4151e27209d59"} Oct 03 13:51:29 crc kubenswrapper[4861]: I1003 13:51:29.810804 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="3413d18c-4d35-4962-bc3f-b6750860b13d" containerName="ceilometer-central-agent" containerID="cri-o://5af7b441f7b61362145e5edc152e1d729bb66a3c297fbd1c3320b8cdd862a9f6" gracePeriod=30 Oct 03 13:51:29 crc kubenswrapper[4861]: I1003 13:51:29.811127 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-7b56bc9586-w989t" event={"ID":"fc6983c6-4e21-49b6-a48a-f062bb5afd49","Type":"ContainerStarted","Data":"1212e428f6bbb6a9d5b684e6d51b4cc2e63bbe878a85021b638dfa421d5c05b0"} Oct 03 13:51:29 crc kubenswrapper[4861]: I1003 13:51:29.811174 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 03 13:51:29 crc kubenswrapper[4861]: I1003 13:51:29.811214 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="3413d18c-4d35-4962-bc3f-b6750860b13d" containerName="proxy-httpd" containerID="cri-o://3d0b6ce35a3cfc12628c6c8d1c1b0935696ab629cd75f33401901447c089dc5a" gracePeriod=30 Oct 03 13:51:29 crc kubenswrapper[4861]: I1003 13:51:29.811281 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="3413d18c-4d35-4962-bc3f-b6750860b13d" containerName="sg-core" containerID="cri-o://fa00cbad7504839003d631c483cce5f8ce9c82d27ece44c2bd50e9fa241a8233" gracePeriod=30 Oct 03 13:51:29 crc kubenswrapper[4861]: I1003 13:51:29.811319 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="3413d18c-4d35-4962-bc3f-b6750860b13d" containerName="ceilometer-notification-agent" containerID="cri-o://e050f64840966798f0046b4fbc8341a52b6af025718938a28c8d770b92dac0b8" gracePeriod=30 Oct 03 13:51:29 crc kubenswrapper[4861]: I1003 13:51:29.844662 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Oct 03 13:51:29 crc kubenswrapper[4861]: I1003 13:51:29.891774 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=4.351061234 podStartE2EDuration="1m10.891754284s" podCreationTimestamp="2025-10-03 13:50:19 +0000 UTC" firstStartedPulling="2025-10-03 13:50:21.298748589 +0000 UTC m=+1135.296733636" lastFinishedPulling="2025-10-03 13:51:27.839441639 +0000 UTC m=+1201.837426686" observedRunningTime="2025-10-03 13:51:29.861759783 +0000 UTC m=+1203.859744830" watchObservedRunningTime="2025-10-03 13:51:29.891754284 +0000 UTC m=+1203.889739331" Oct 03 13:51:29 crc kubenswrapper[4861]: I1003 13:51:29.990239 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 03 13:51:30 crc kubenswrapper[4861]: I1003 13:51:30.269620 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-75c8ddd69c-5jkhs" Oct 03 13:51:30 crc kubenswrapper[4861]: I1003 13:51:30.349817 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/70732663-446d-41a5-b0b2-94043cc3c069-dns-swift-storage-0\") pod \"70732663-446d-41a5-b0b2-94043cc3c069\" (UID: \"70732663-446d-41a5-b0b2-94043cc3c069\") " Oct 03 13:51:30 crc kubenswrapper[4861]: I1003 13:51:30.349962 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/70732663-446d-41a5-b0b2-94043cc3c069-ovsdbserver-nb\") pod \"70732663-446d-41a5-b0b2-94043cc3c069\" (UID: \"70732663-446d-41a5-b0b2-94043cc3c069\") " Oct 03 13:51:30 crc kubenswrapper[4861]: I1003 13:51:30.350015 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/70732663-446d-41a5-b0b2-94043cc3c069-config\") pod \"70732663-446d-41a5-b0b2-94043cc3c069\" (UID: \"70732663-446d-41a5-b0b2-94043cc3c069\") " Oct 03 13:51:30 crc kubenswrapper[4861]: I1003 13:51:30.350047 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-txlmz\" (UniqueName: \"kubernetes.io/projected/70732663-446d-41a5-b0b2-94043cc3c069-kube-api-access-txlmz\") pod \"70732663-446d-41a5-b0b2-94043cc3c069\" (UID: \"70732663-446d-41a5-b0b2-94043cc3c069\") " Oct 03 13:51:30 crc kubenswrapper[4861]: I1003 13:51:30.350084 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/70732663-446d-41a5-b0b2-94043cc3c069-ovsdbserver-sb\") pod \"70732663-446d-41a5-b0b2-94043cc3c069\" (UID: \"70732663-446d-41a5-b0b2-94043cc3c069\") " Oct 03 13:51:30 crc kubenswrapper[4861]: I1003 13:51:30.350117 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/70732663-446d-41a5-b0b2-94043cc3c069-dns-svc\") pod \"70732663-446d-41a5-b0b2-94043cc3c069\" (UID: \"70732663-446d-41a5-b0b2-94043cc3c069\") " Oct 03 13:51:30 crc kubenswrapper[4861]: I1003 13:51:30.382371 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/70732663-446d-41a5-b0b2-94043cc3c069-kube-api-access-txlmz" (OuterVolumeSpecName: "kube-api-access-txlmz") pod "70732663-446d-41a5-b0b2-94043cc3c069" (UID: "70732663-446d-41a5-b0b2-94043cc3c069"). InnerVolumeSpecName "kube-api-access-txlmz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:51:30 crc kubenswrapper[4861]: I1003 13:51:30.397473 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/70732663-446d-41a5-b0b2-94043cc3c069-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "70732663-446d-41a5-b0b2-94043cc3c069" (UID: "70732663-446d-41a5-b0b2-94043cc3c069"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:51:30 crc kubenswrapper[4861]: I1003 13:51:30.397810 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/70732663-446d-41a5-b0b2-94043cc3c069-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "70732663-446d-41a5-b0b2-94043cc3c069" (UID: "70732663-446d-41a5-b0b2-94043cc3c069"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:51:30 crc kubenswrapper[4861]: I1003 13:51:30.403358 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/70732663-446d-41a5-b0b2-94043cc3c069-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "70732663-446d-41a5-b0b2-94043cc3c069" (UID: "70732663-446d-41a5-b0b2-94043cc3c069"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:51:30 crc kubenswrapper[4861]: I1003 13:51:30.425511 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/70732663-446d-41a5-b0b2-94043cc3c069-config" (OuterVolumeSpecName: "config") pod "70732663-446d-41a5-b0b2-94043cc3c069" (UID: "70732663-446d-41a5-b0b2-94043cc3c069"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:51:30 crc kubenswrapper[4861]: I1003 13:51:30.426068 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/70732663-446d-41a5-b0b2-94043cc3c069-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "70732663-446d-41a5-b0b2-94043cc3c069" (UID: "70732663-446d-41a5-b0b2-94043cc3c069"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:51:30 crc kubenswrapper[4861]: I1003 13:51:30.452739 4861 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/70732663-446d-41a5-b0b2-94043cc3c069-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 03 13:51:30 crc kubenswrapper[4861]: I1003 13:51:30.452771 4861 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/70732663-446d-41a5-b0b2-94043cc3c069-config\") on node \"crc\" DevicePath \"\"" Oct 03 13:51:30 crc kubenswrapper[4861]: I1003 13:51:30.452783 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-txlmz\" (UniqueName: \"kubernetes.io/projected/70732663-446d-41a5-b0b2-94043cc3c069-kube-api-access-txlmz\") on node \"crc\" DevicePath \"\"" Oct 03 13:51:30 crc kubenswrapper[4861]: I1003 13:51:30.452796 4861 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/70732663-446d-41a5-b0b2-94043cc3c069-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 03 13:51:30 crc kubenswrapper[4861]: I1003 13:51:30.452805 4861 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/70732663-446d-41a5-b0b2-94043cc3c069-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 03 13:51:30 crc kubenswrapper[4861]: I1003 13:51:30.452814 4861 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/70732663-446d-41a5-b0b2-94043cc3c069-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 03 13:51:30 crc kubenswrapper[4861]: I1003 13:51:30.512784 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Oct 03 13:51:30 crc kubenswrapper[4861]: I1003 13:51:30.526659 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5784cf869f-gd9mp"] Oct 03 13:51:30 crc kubenswrapper[4861]: W1003 13:51:30.539784 4861 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod709ebd79_e1bf_4e30_ba04_d1dc2c954398.slice/crio-495ec299cccb0683c3ce5b0c2b4461d4239ac3525e6d6fbd0d35965f4608fdcd WatchSource:0}: Error finding container 495ec299cccb0683c3ce5b0c2b4461d4239ac3525e6d6fbd0d35965f4608fdcd: Status 404 returned error can't find the container with id 495ec299cccb0683c3ce5b0c2b4461d4239ac3525e6d6fbd0d35965f4608fdcd Oct 03 13:51:30 crc kubenswrapper[4861]: I1003 13:51:30.676590 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-7c8f94f9d8-cqjv4" Oct 03 13:51:30 crc kubenswrapper[4861]: I1003 13:51:30.848454 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"8bbb0cd4-37cc-4039-a13f-55dbeb56c336","Type":"ContainerStarted","Data":"d7f40bd7eb297e0bf199c87b293b03b74c2d99ed18155b965f510c063f0cdb3e"} Oct 03 13:51:30 crc kubenswrapper[4861]: I1003 13:51:30.885935 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5784cf869f-gd9mp" event={"ID":"709ebd79-e1bf-4e30-ba04-d1dc2c954398","Type":"ContainerStarted","Data":"495ec299cccb0683c3ce5b0c2b4461d4239ac3525e6d6fbd0d35965f4608fdcd"} Oct 03 13:51:30 crc kubenswrapper[4861]: I1003 13:51:30.893323 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75c8ddd69c-5jkhs" 
event={"ID":"70732663-446d-41a5-b0b2-94043cc3c069","Type":"ContainerDied","Data":"994644a1666948879834d3bca63973b66bc2b51c599ff5c4364d7c897d216c0e"} Oct 03 13:51:30 crc kubenswrapper[4861]: I1003 13:51:30.893376 4861 scope.go:117] "RemoveContainer" containerID="77009d3c3d9557835b0b4bf17a9edba7859154e4b77971a0627c3865f7760d03" Oct 03 13:51:30 crc kubenswrapper[4861]: I1003 13:51:30.893500 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-75c8ddd69c-5jkhs" Oct 03 13:51:30 crc kubenswrapper[4861]: I1003 13:51:30.901327 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6cc4d4775d-sr7pz" event={"ID":"9ab65d2f-a6e3-477c-81cf-d721fe6eaa9d","Type":"ContainerStarted","Data":"4a970d5f9d8784fca47a105003fe86d2ce572fa6ad9d5016f66f9cf1aa78427d"} Oct 03 13:51:30 crc kubenswrapper[4861]: I1003 13:51:30.902598 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-6cc4d4775d-sr7pz" Oct 03 13:51:30 crc kubenswrapper[4861]: I1003 13:51:30.918325 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-6cc4d4775d-sr7pz" Oct 03 13:51:30 crc kubenswrapper[4861]: I1003 13:51:30.919938 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"4b8c0315-b124-4017-a6d1-20d73208248d","Type":"ContainerStarted","Data":"b2d61d90d524903848cd7d1ac39c4409b1e795836f81508477eb12274869faf9"} Oct 03 13:51:30 crc kubenswrapper[4861]: I1003 13:51:30.946791 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-6cc4d4775d-sr7pz" podStartSLOduration=4.946769434 podStartE2EDuration="4.946769434s" podCreationTimestamp="2025-10-03 13:51:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:51:30.928750362 +0000 UTC m=+1204.926735429" watchObservedRunningTime="2025-10-03 13:51:30.946769434 +0000 UTC m=+1204.944754481" Oct 03 13:51:31 crc kubenswrapper[4861]: I1003 13:51:31.020624 4861 generic.go:334] "Generic (PLEG): container finished" podID="3413d18c-4d35-4962-bc3f-b6750860b13d" containerID="fa00cbad7504839003d631c483cce5f8ce9c82d27ece44c2bd50e9fa241a8233" exitCode=2 Oct 03 13:51:31 crc kubenswrapper[4861]: I1003 13:51:31.020657 4861 generic.go:334] "Generic (PLEG): container finished" podID="3413d18c-4d35-4962-bc3f-b6750860b13d" containerID="5af7b441f7b61362145e5edc152e1d729bb66a3c297fbd1c3320b8cdd862a9f6" exitCode=0 Oct 03 13:51:31 crc kubenswrapper[4861]: I1003 13:51:31.020679 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3413d18c-4d35-4962-bc3f-b6750860b13d","Type":"ContainerDied","Data":"fa00cbad7504839003d631c483cce5f8ce9c82d27ece44c2bd50e9fa241a8233"} Oct 03 13:51:31 crc kubenswrapper[4861]: I1003 13:51:31.020703 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3413d18c-4d35-4962-bc3f-b6750860b13d","Type":"ContainerDied","Data":"5af7b441f7b61362145e5edc152e1d729bb66a3c297fbd1c3320b8cdd862a9f6"} Oct 03 13:51:31 crc kubenswrapper[4861]: I1003 13:51:31.079305 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-75c8ddd69c-5jkhs"] Oct 03 13:51:31 crc kubenswrapper[4861]: I1003 13:51:31.088967 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-75c8ddd69c-5jkhs"] Oct 03 13:51:32 crc kubenswrapper[4861]: I1003 
13:51:32.053091 4861 generic.go:334] "Generic (PLEG): container finished" podID="709ebd79-e1bf-4e30-ba04-d1dc2c954398" containerID="f1ccb5eedcb0d742d711b0df447cffb0ec2e1d8751a00163510ee76b1567fb11" exitCode=0 Oct 03 13:51:32 crc kubenswrapper[4861]: I1003 13:51:32.053471 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5784cf869f-gd9mp" event={"ID":"709ebd79-e1bf-4e30-ba04-d1dc2c954398","Type":"ContainerDied","Data":"f1ccb5eedcb0d742d711b0df447cffb0ec2e1d8751a00163510ee76b1567fb11"} Oct 03 13:51:32 crc kubenswrapper[4861]: I1003 13:51:32.702079 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="70732663-446d-41a5-b0b2-94043cc3c069" path="/var/lib/kubelet/pods/70732663-446d-41a5-b0b2-94043cc3c069/volumes" Oct 03 13:51:32 crc kubenswrapper[4861]: I1003 13:51:32.790126 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Oct 03 13:51:33 crc kubenswrapper[4861]: I1003 13:51:33.084765 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"8bbb0cd4-37cc-4039-a13f-55dbeb56c336","Type":"ContainerStarted","Data":"2fcd19aff19f1d4e0d704be9a7d76d943ad588f19760e2576790dc8976267ff1"} Oct 03 13:51:33 crc kubenswrapper[4861]: I1003 13:51:33.098088 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"4b8c0315-b124-4017-a6d1-20d73208248d","Type":"ContainerStarted","Data":"9aef2af41dc1bdbdfa5e161c2b6e76b1d5d368aeb9dadcca2ba76ead9e17e16f"} Oct 03 13:51:33 crc kubenswrapper[4861]: I1003 13:51:33.101288 4861 generic.go:334] "Generic (PLEG): container finished" podID="3413d18c-4d35-4962-bc3f-b6750860b13d" containerID="e050f64840966798f0046b4fbc8341a52b6af025718938a28c8d770b92dac0b8" exitCode=0 Oct 03 13:51:33 crc kubenswrapper[4861]: I1003 13:51:33.101395 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3413d18c-4d35-4962-bc3f-b6750860b13d","Type":"ContainerDied","Data":"e050f64840966798f0046b4fbc8341a52b6af025718938a28c8d770b92dac0b8"} Oct 03 13:51:33 crc kubenswrapper[4861]: I1003 13:51:33.458201 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-64fc59b774-2zcpl"] Oct 03 13:51:33 crc kubenswrapper[4861]: E1003 13:51:33.458585 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="70732663-446d-41a5-b0b2-94043cc3c069" containerName="init" Oct 03 13:51:33 crc kubenswrapper[4861]: I1003 13:51:33.458597 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="70732663-446d-41a5-b0b2-94043cc3c069" containerName="init" Oct 03 13:51:33 crc kubenswrapper[4861]: I1003 13:51:33.458809 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="70732663-446d-41a5-b0b2-94043cc3c069" containerName="init" Oct 03 13:51:33 crc kubenswrapper[4861]: I1003 13:51:33.459762 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-64fc59b774-2zcpl" Oct 03 13:51:33 crc kubenswrapper[4861]: I1003 13:51:33.462623 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-public-svc" Oct 03 13:51:33 crc kubenswrapper[4861]: I1003 13:51:33.462966 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-internal-svc" Oct 03 13:51:33 crc kubenswrapper[4861]: I1003 13:51:33.486609 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-64fc59b774-2zcpl"] Oct 03 13:51:33 crc kubenswrapper[4861]: I1003 13:51:33.545331 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/cf1005d0-0ea7-4d8a-bec8-445949aa9162-config-data-custom\") pod \"barbican-api-64fc59b774-2zcpl\" (UID: \"cf1005d0-0ea7-4d8a-bec8-445949aa9162\") " pod="openstack/barbican-api-64fc59b774-2zcpl" Oct 03 13:51:33 crc kubenswrapper[4861]: I1003 13:51:33.545663 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cf1005d0-0ea7-4d8a-bec8-445949aa9162-internal-tls-certs\") pod \"barbican-api-64fc59b774-2zcpl\" (UID: \"cf1005d0-0ea7-4d8a-bec8-445949aa9162\") " pod="openstack/barbican-api-64fc59b774-2zcpl" Oct 03 13:51:33 crc kubenswrapper[4861]: I1003 13:51:33.545695 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cf1005d0-0ea7-4d8a-bec8-445949aa9162-logs\") pod \"barbican-api-64fc59b774-2zcpl\" (UID: \"cf1005d0-0ea7-4d8a-bec8-445949aa9162\") " pod="openstack/barbican-api-64fc59b774-2zcpl" Oct 03 13:51:33 crc kubenswrapper[4861]: I1003 13:51:33.545758 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cf1005d0-0ea7-4d8a-bec8-445949aa9162-public-tls-certs\") pod \"barbican-api-64fc59b774-2zcpl\" (UID: \"cf1005d0-0ea7-4d8a-bec8-445949aa9162\") " pod="openstack/barbican-api-64fc59b774-2zcpl" Oct 03 13:51:33 crc kubenswrapper[4861]: I1003 13:51:33.545777 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-99jmb\" (UniqueName: \"kubernetes.io/projected/cf1005d0-0ea7-4d8a-bec8-445949aa9162-kube-api-access-99jmb\") pod \"barbican-api-64fc59b774-2zcpl\" (UID: \"cf1005d0-0ea7-4d8a-bec8-445949aa9162\") " pod="openstack/barbican-api-64fc59b774-2zcpl" Oct 03 13:51:33 crc kubenswrapper[4861]: I1003 13:51:33.545799 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf1005d0-0ea7-4d8a-bec8-445949aa9162-combined-ca-bundle\") pod \"barbican-api-64fc59b774-2zcpl\" (UID: \"cf1005d0-0ea7-4d8a-bec8-445949aa9162\") " pod="openstack/barbican-api-64fc59b774-2zcpl" Oct 03 13:51:33 crc kubenswrapper[4861]: I1003 13:51:33.545818 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf1005d0-0ea7-4d8a-bec8-445949aa9162-config-data\") pod \"barbican-api-64fc59b774-2zcpl\" (UID: \"cf1005d0-0ea7-4d8a-bec8-445949aa9162\") " pod="openstack/barbican-api-64fc59b774-2zcpl" Oct 03 13:51:33 crc kubenswrapper[4861]: I1003 13:51:33.629498 4861 prober.go:107] "Probe failed" 
probeType="Startup" pod="openstack/horizon-84cdb7b9dd-jhc2h" podUID="c589e11a-4953-46ec-aeff-a83f6557421f" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.143:8443/dashboard/auth/login/?next=/dashboard/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 03 13:51:33 crc kubenswrapper[4861]: I1003 13:51:33.629590 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-84cdb7b9dd-jhc2h" Oct 03 13:51:33 crc kubenswrapper[4861]: I1003 13:51:33.630491 4861 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="horizon" containerStatusID={"Type":"cri-o","ID":"ddeaa56e9aa6c6ba89a75f8f405df5f2eebbad9b1e8dee7d0758a5aa07447be8"} pod="openstack/horizon-84cdb7b9dd-jhc2h" containerMessage="Container horizon failed startup probe, will be restarted" Oct 03 13:51:33 crc kubenswrapper[4861]: I1003 13:51:33.630539 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-84cdb7b9dd-jhc2h" podUID="c589e11a-4953-46ec-aeff-a83f6557421f" containerName="horizon" containerID="cri-o://ddeaa56e9aa6c6ba89a75f8f405df5f2eebbad9b1e8dee7d0758a5aa07447be8" gracePeriod=30 Oct 03 13:51:33 crc kubenswrapper[4861]: I1003 13:51:33.648033 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cf1005d0-0ea7-4d8a-bec8-445949aa9162-internal-tls-certs\") pod \"barbican-api-64fc59b774-2zcpl\" (UID: \"cf1005d0-0ea7-4d8a-bec8-445949aa9162\") " pod="openstack/barbican-api-64fc59b774-2zcpl" Oct 03 13:51:33 crc kubenswrapper[4861]: I1003 13:51:33.648647 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cf1005d0-0ea7-4d8a-bec8-445949aa9162-logs\") pod \"barbican-api-64fc59b774-2zcpl\" (UID: \"cf1005d0-0ea7-4d8a-bec8-445949aa9162\") " pod="openstack/barbican-api-64fc59b774-2zcpl" Oct 03 13:51:33 crc kubenswrapper[4861]: I1003 13:51:33.648787 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cf1005d0-0ea7-4d8a-bec8-445949aa9162-public-tls-certs\") pod \"barbican-api-64fc59b774-2zcpl\" (UID: \"cf1005d0-0ea7-4d8a-bec8-445949aa9162\") " pod="openstack/barbican-api-64fc59b774-2zcpl" Oct 03 13:51:33 crc kubenswrapper[4861]: I1003 13:51:33.648826 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-99jmb\" (UniqueName: \"kubernetes.io/projected/cf1005d0-0ea7-4d8a-bec8-445949aa9162-kube-api-access-99jmb\") pod \"barbican-api-64fc59b774-2zcpl\" (UID: \"cf1005d0-0ea7-4d8a-bec8-445949aa9162\") " pod="openstack/barbican-api-64fc59b774-2zcpl" Oct 03 13:51:33 crc kubenswrapper[4861]: I1003 13:51:33.648852 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf1005d0-0ea7-4d8a-bec8-445949aa9162-combined-ca-bundle\") pod \"barbican-api-64fc59b774-2zcpl\" (UID: \"cf1005d0-0ea7-4d8a-bec8-445949aa9162\") " pod="openstack/barbican-api-64fc59b774-2zcpl" Oct 03 13:51:33 crc kubenswrapper[4861]: I1003 13:51:33.648920 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf1005d0-0ea7-4d8a-bec8-445949aa9162-config-data\") pod \"barbican-api-64fc59b774-2zcpl\" (UID: \"cf1005d0-0ea7-4d8a-bec8-445949aa9162\") " 
pod="openstack/barbican-api-64fc59b774-2zcpl" Oct 03 13:51:33 crc kubenswrapper[4861]: I1003 13:51:33.648958 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/cf1005d0-0ea7-4d8a-bec8-445949aa9162-config-data-custom\") pod \"barbican-api-64fc59b774-2zcpl\" (UID: \"cf1005d0-0ea7-4d8a-bec8-445949aa9162\") " pod="openstack/barbican-api-64fc59b774-2zcpl" Oct 03 13:51:33 crc kubenswrapper[4861]: I1003 13:51:33.650562 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cf1005d0-0ea7-4d8a-bec8-445949aa9162-logs\") pod \"barbican-api-64fc59b774-2zcpl\" (UID: \"cf1005d0-0ea7-4d8a-bec8-445949aa9162\") " pod="openstack/barbican-api-64fc59b774-2zcpl" Oct 03 13:51:33 crc kubenswrapper[4861]: I1003 13:51:33.651987 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cf1005d0-0ea7-4d8a-bec8-445949aa9162-internal-tls-certs\") pod \"barbican-api-64fc59b774-2zcpl\" (UID: \"cf1005d0-0ea7-4d8a-bec8-445949aa9162\") " pod="openstack/barbican-api-64fc59b774-2zcpl" Oct 03 13:51:33 crc kubenswrapper[4861]: I1003 13:51:33.657915 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/cf1005d0-0ea7-4d8a-bec8-445949aa9162-config-data-custom\") pod \"barbican-api-64fc59b774-2zcpl\" (UID: \"cf1005d0-0ea7-4d8a-bec8-445949aa9162\") " pod="openstack/barbican-api-64fc59b774-2zcpl" Oct 03 13:51:33 crc kubenswrapper[4861]: I1003 13:51:33.659712 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf1005d0-0ea7-4d8a-bec8-445949aa9162-combined-ca-bundle\") pod \"barbican-api-64fc59b774-2zcpl\" (UID: \"cf1005d0-0ea7-4d8a-bec8-445949aa9162\") " pod="openstack/barbican-api-64fc59b774-2zcpl" Oct 03 13:51:33 crc kubenswrapper[4861]: I1003 13:51:33.662217 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cf1005d0-0ea7-4d8a-bec8-445949aa9162-public-tls-certs\") pod \"barbican-api-64fc59b774-2zcpl\" (UID: \"cf1005d0-0ea7-4d8a-bec8-445949aa9162\") " pod="openstack/barbican-api-64fc59b774-2zcpl" Oct 03 13:51:33 crc kubenswrapper[4861]: I1003 13:51:33.662147 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf1005d0-0ea7-4d8a-bec8-445949aa9162-config-data\") pod \"barbican-api-64fc59b774-2zcpl\" (UID: \"cf1005d0-0ea7-4d8a-bec8-445949aa9162\") " pod="openstack/barbican-api-64fc59b774-2zcpl" Oct 03 13:51:33 crc kubenswrapper[4861]: I1003 13:51:33.673754 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-99jmb\" (UniqueName: \"kubernetes.io/projected/cf1005d0-0ea7-4d8a-bec8-445949aa9162-kube-api-access-99jmb\") pod \"barbican-api-64fc59b774-2zcpl\" (UID: \"cf1005d0-0ea7-4d8a-bec8-445949aa9162\") " pod="openstack/barbican-api-64fc59b774-2zcpl" Oct 03 13:51:33 crc kubenswrapper[4861]: I1003 13:51:33.726448 4861 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-6c8cb9d9fb-bt6ls" podUID="81ec621b-cc30-4ab2-ae0e-bdd71629009f" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.144:8443/dashboard/auth/login/?next=/dashboard/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 03 13:51:33 crc 
kubenswrapper[4861]: I1003 13:51:33.726527 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-6c8cb9d9fb-bt6ls" Oct 03 13:51:33 crc kubenswrapper[4861]: I1003 13:51:33.727413 4861 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="horizon" containerStatusID={"Type":"cri-o","ID":"834a5cec05a299ac4024b21688ce1b56239064614bf930e0fb726a9fa037c4fb"} pod="openstack/horizon-6c8cb9d9fb-bt6ls" containerMessage="Container horizon failed startup probe, will be restarted" Oct 03 13:51:33 crc kubenswrapper[4861]: I1003 13:51:33.727517 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-6c8cb9d9fb-bt6ls" podUID="81ec621b-cc30-4ab2-ae0e-bdd71629009f" containerName="horizon" containerID="cri-o://834a5cec05a299ac4024b21688ce1b56239064614bf930e0fb726a9fa037c4fb" gracePeriod=30 Oct 03 13:51:33 crc kubenswrapper[4861]: I1003 13:51:33.782785 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-64fc59b774-2zcpl" Oct 03 13:51:34 crc kubenswrapper[4861]: I1003 13:51:34.158159 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5784cf869f-gd9mp" event={"ID":"709ebd79-e1bf-4e30-ba04-d1dc2c954398","Type":"ContainerStarted","Data":"79be2c834c62a38f57e7a14bc42bb562338318719b58c3ab42257952f4c464a8"} Oct 03 13:51:34 crc kubenswrapper[4861]: I1003 13:51:34.159786 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5784cf869f-gd9mp" Oct 03 13:51:34 crc kubenswrapper[4861]: I1003 13:51:34.174727 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-7b56bc9586-w989t" event={"ID":"fc6983c6-4e21-49b6-a48a-f062bb5afd49","Type":"ContainerStarted","Data":"6e97c7b168972e7ff38c4ac6996dd8b2100fabdbcb98755a2945b222f01b17fb"} Oct 03 13:51:34 crc kubenswrapper[4861]: I1003 13:51:34.203247 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5784cf869f-gd9mp" podStartSLOduration=5.203181772 podStartE2EDuration="5.203181772s" podCreationTimestamp="2025-10-03 13:51:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:51:34.191031778 +0000 UTC m=+1208.189016825" watchObservedRunningTime="2025-10-03 13:51:34.203181772 +0000 UTC m=+1208.201166839" Oct 03 13:51:34 crc kubenswrapper[4861]: I1003 13:51:34.208955 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-79b8bfc47f-5jchw" event={"ID":"ba4d9d03-a7d0-46ed-8429-008882213b57","Type":"ContainerStarted","Data":"8747ef1f80698086816ea1833fbf2d8e7d932fdfd30ce837344823918f1dc8d2"} Oct 03 13:51:34 crc kubenswrapper[4861]: I1003 13:51:34.588738 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-64fc59b774-2zcpl"] Oct 03 13:51:34 crc kubenswrapper[4861]: I1003 13:51:34.856092 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-55bc9b5c77-pkdzr" Oct 03 13:51:34 crc kubenswrapper[4861]: I1003 13:51:34.916983 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-7c8f94f9d8-cqjv4"] Oct 03 13:51:34 crc kubenswrapper[4861]: I1003 13:51:34.917295 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-7c8f94f9d8-cqjv4" podUID="3cff73d0-7e13-497b-9c06-af6e078031c5" containerName="neutron-api" 
containerID="cri-o://f21b64e6bb91b5b17fea39e443b78ac6d0095c8f6bb85a5736dc5043efa19172" gracePeriod=30 Oct 03 13:51:34 crc kubenswrapper[4861]: I1003 13:51:34.917454 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-7c8f94f9d8-cqjv4" podUID="3cff73d0-7e13-497b-9c06-af6e078031c5" containerName="neutron-httpd" containerID="cri-o://c9faed708ce407ef8583884f185ee075b0285f8f584ab17b840c681d39035d0a" gracePeriod=30 Oct 03 13:51:35 crc kubenswrapper[4861]: I1003 13:51:35.246277 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-7b56bc9586-w989t" event={"ID":"fc6983c6-4e21-49b6-a48a-f062bb5afd49","Type":"ContainerStarted","Data":"2ac46e2f15373b925635186bd1cc5d1a868bcc4a135f870fc7b054589929f327"} Oct 03 13:51:35 crc kubenswrapper[4861]: I1003 13:51:35.259145 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"4b8c0315-b124-4017-a6d1-20d73208248d","Type":"ContainerStarted","Data":"46ce99e4a35ec0cd2ce076c1e80db7e775152bc602cc54080dad3b8fe802eafa"} Oct 03 13:51:35 crc kubenswrapper[4861]: I1003 13:51:35.261207 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-79b8bfc47f-5jchw" event={"ID":"ba4d9d03-a7d0-46ed-8429-008882213b57","Type":"ContainerStarted","Data":"baf2baf085690fd3dbd29178885da1448a9403b2323a3c4c1d04e120b66c1341"} Oct 03 13:51:35 crc kubenswrapper[4861]: I1003 13:51:35.263904 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-64fc59b774-2zcpl" event={"ID":"cf1005d0-0ea7-4d8a-bec8-445949aa9162","Type":"ContainerStarted","Data":"5454e45191795fa788eeb3818396eb84afa1f013bf5362092af8dcb477db53aa"} Oct 03 13:51:35 crc kubenswrapper[4861]: I1003 13:51:35.263949 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-64fc59b774-2zcpl" event={"ID":"cf1005d0-0ea7-4d8a-bec8-445949aa9162","Type":"ContainerStarted","Data":"973f42d6bffcc1787714af39416bae717b5665d74ec66052aed4d939a5725d97"} Oct 03 13:51:35 crc kubenswrapper[4861]: I1003 13:51:35.276412 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-7b56bc9586-w989t" podStartSLOduration=4.982453396 podStartE2EDuration="9.276395968s" podCreationTimestamp="2025-10-03 13:51:26 +0000 UTC" firstStartedPulling="2025-10-03 13:51:28.836319313 +0000 UTC m=+1202.834304350" lastFinishedPulling="2025-10-03 13:51:33.130261885 +0000 UTC m=+1207.128246922" observedRunningTime="2025-10-03 13:51:35.273365007 +0000 UTC m=+1209.271350054" watchObservedRunningTime="2025-10-03 13:51:35.276395968 +0000 UTC m=+1209.274381015" Oct 03 13:51:35 crc kubenswrapper[4861]: I1003 13:51:35.285571 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="8bbb0cd4-37cc-4039-a13f-55dbeb56c336" containerName="cinder-api-log" containerID="cri-o://2fcd19aff19f1d4e0d704be9a7d76d943ad588f19760e2576790dc8976267ff1" gracePeriod=30 Oct 03 13:51:35 crc kubenswrapper[4861]: I1003 13:51:35.285747 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="8bbb0cd4-37cc-4039-a13f-55dbeb56c336" containerName="cinder-api" containerID="cri-o://46749097649a38a960d20044f79fc12eb2206cae0c185841680247191b6ece6c" gracePeriod=30 Oct 03 13:51:35 crc kubenswrapper[4861]: I1003 13:51:35.285791 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" 
event={"ID":"8bbb0cd4-37cc-4039-a13f-55dbeb56c336","Type":"ContainerStarted","Data":"46749097649a38a960d20044f79fc12eb2206cae0c185841680247191b6ece6c"} Oct 03 13:51:35 crc kubenswrapper[4861]: I1003 13:51:35.285827 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Oct 03 13:51:35 crc kubenswrapper[4861]: I1003 13:51:35.347869 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=5.281937419 podStartE2EDuration="6.347845019s" podCreationTimestamp="2025-10-03 13:51:29 +0000 UTC" firstStartedPulling="2025-10-03 13:51:30.031947523 +0000 UTC m=+1204.029932570" lastFinishedPulling="2025-10-03 13:51:31.097855123 +0000 UTC m=+1205.095840170" observedRunningTime="2025-10-03 13:51:35.308028283 +0000 UTC m=+1209.306013330" watchObservedRunningTime="2025-10-03 13:51:35.347845019 +0000 UTC m=+1209.345830066" Oct 03 13:51:35 crc kubenswrapper[4861]: I1003 13:51:35.355007 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-79b8bfc47f-5jchw" podStartSLOduration=4.658052592 podStartE2EDuration="9.354989259s" podCreationTimestamp="2025-10-03 13:51:26 +0000 UTC" firstStartedPulling="2025-10-03 13:51:28.431628763 +0000 UTC m=+1202.429613810" lastFinishedPulling="2025-10-03 13:51:33.12856543 +0000 UTC m=+1207.126550477" observedRunningTime="2025-10-03 13:51:35.341871189 +0000 UTC m=+1209.339856236" watchObservedRunningTime="2025-10-03 13:51:35.354989259 +0000 UTC m=+1209.352974316" Oct 03 13:51:36 crc kubenswrapper[4861]: I1003 13:51:36.335332 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-64fc59b774-2zcpl" event={"ID":"cf1005d0-0ea7-4d8a-bec8-445949aa9162","Type":"ContainerStarted","Data":"4bf85ee5544322d7d9c311a9a234cd79ca33603a916d0e8d93b85c5f04cd90cc"} Oct 03 13:51:36 crc kubenswrapper[4861]: I1003 13:51:36.335653 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-64fc59b774-2zcpl" Oct 03 13:51:36 crc kubenswrapper[4861]: I1003 13:51:36.335673 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-64fc59b774-2zcpl" Oct 03 13:51:36 crc kubenswrapper[4861]: I1003 13:51:36.347656 4861 generic.go:334] "Generic (PLEG): container finished" podID="8bbb0cd4-37cc-4039-a13f-55dbeb56c336" containerID="2fcd19aff19f1d4e0d704be9a7d76d943ad588f19760e2576790dc8976267ff1" exitCode=143 Oct 03 13:51:36 crc kubenswrapper[4861]: I1003 13:51:36.347740 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"8bbb0cd4-37cc-4039-a13f-55dbeb56c336","Type":"ContainerDied","Data":"2fcd19aff19f1d4e0d704be9a7d76d943ad588f19760e2576790dc8976267ff1"} Oct 03 13:51:36 crc kubenswrapper[4861]: I1003 13:51:36.361118 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-64fc59b774-2zcpl" podStartSLOduration=3.361101981 podStartE2EDuration="3.361101981s" podCreationTimestamp="2025-10-03 13:51:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:51:36.357829883 +0000 UTC m=+1210.355814940" watchObservedRunningTime="2025-10-03 13:51:36.361101981 +0000 UTC m=+1210.359087028" Oct 03 13:51:36 crc kubenswrapper[4861]: I1003 13:51:36.361705 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=7.361699347 
podStartE2EDuration="7.361699347s" podCreationTimestamp="2025-10-03 13:51:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:51:35.447747139 +0000 UTC m=+1209.445732196" watchObservedRunningTime="2025-10-03 13:51:36.361699347 +0000 UTC m=+1210.359684394" Oct 03 13:51:36 crc kubenswrapper[4861]: I1003 13:51:36.389437 4861 generic.go:334] "Generic (PLEG): container finished" podID="3cff73d0-7e13-497b-9c06-af6e078031c5" containerID="c9faed708ce407ef8583884f185ee075b0285f8f584ab17b840c681d39035d0a" exitCode=0 Oct 03 13:51:36 crc kubenswrapper[4861]: I1003 13:51:36.392377 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7c8f94f9d8-cqjv4" event={"ID":"3cff73d0-7e13-497b-9c06-af6e078031c5","Type":"ContainerDied","Data":"c9faed708ce407ef8583884f185ee075b0285f8f584ab17b840c681d39035d0a"} Oct 03 13:51:38 crc kubenswrapper[4861]: I1003 13:51:38.800492 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-6cc4d4775d-sr7pz" Oct 03 13:51:38 crc kubenswrapper[4861]: I1003 13:51:38.857992 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-6cc4d4775d-sr7pz" Oct 03 13:51:39 crc kubenswrapper[4861]: I1003 13:51:39.417911 4861 generic.go:334] "Generic (PLEG): container finished" podID="81ec621b-cc30-4ab2-ae0e-bdd71629009f" containerID="834a5cec05a299ac4024b21688ce1b56239064614bf930e0fb726a9fa037c4fb" exitCode=0 Oct 03 13:51:39 crc kubenswrapper[4861]: I1003 13:51:39.418499 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6c8cb9d9fb-bt6ls" event={"ID":"81ec621b-cc30-4ab2-ae0e-bdd71629009f","Type":"ContainerDied","Data":"834a5cec05a299ac4024b21688ce1b56239064614bf930e0fb726a9fa037c4fb"} Oct 03 13:51:39 crc kubenswrapper[4861]: I1003 13:51:39.440020 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Oct 03 13:51:39 crc kubenswrapper[4861]: I1003 13:51:39.677738 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Oct 03 13:51:39 crc kubenswrapper[4861]: I1003 13:51:39.761434 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5784cf869f-gd9mp" Oct 03 13:51:39 crc kubenswrapper[4861]: I1003 13:51:39.830481 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-84b966f6c9-hxkbs"] Oct 03 13:51:39 crc kubenswrapper[4861]: I1003 13:51:39.831552 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-84b966f6c9-hxkbs" podUID="72839de1-20d4-42dd-b913-3a8cbfffa95d" containerName="dnsmasq-dns" containerID="cri-o://28f655bb301621944344ee708abddcef35e2658feabf7deeec559af2b704545a" gracePeriod=10 Oct 03 13:51:40 crc kubenswrapper[4861]: I1003 13:51:40.459395 4861 generic.go:334] "Generic (PLEG): container finished" podID="c589e11a-4953-46ec-aeff-a83f6557421f" containerID="ddeaa56e9aa6c6ba89a75f8f405df5f2eebbad9b1e8dee7d0758a5aa07447be8" exitCode=0 Oct 03 13:51:40 crc kubenswrapper[4861]: I1003 13:51:40.459705 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-84cdb7b9dd-jhc2h" event={"ID":"c589e11a-4953-46ec-aeff-a83f6557421f","Type":"ContainerDied","Data":"ddeaa56e9aa6c6ba89a75f8f405df5f2eebbad9b1e8dee7d0758a5aa07447be8"} Oct 03 13:51:40 crc kubenswrapper[4861]: I1003 13:51:40.486608 4861 generic.go:334] 
"Generic (PLEG): container finished" podID="3cff73d0-7e13-497b-9c06-af6e078031c5" containerID="f21b64e6bb91b5b17fea39e443b78ac6d0095c8f6bb85a5736dc5043efa19172" exitCode=0 Oct 03 13:51:40 crc kubenswrapper[4861]: I1003 13:51:40.486684 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7c8f94f9d8-cqjv4" event={"ID":"3cff73d0-7e13-497b-9c06-af6e078031c5","Type":"ContainerDied","Data":"f21b64e6bb91b5b17fea39e443b78ac6d0095c8f6bb85a5736dc5043efa19172"} Oct 03 13:51:40 crc kubenswrapper[4861]: I1003 13:51:40.504425 4861 generic.go:334] "Generic (PLEG): container finished" podID="72839de1-20d4-42dd-b913-3a8cbfffa95d" containerID="28f655bb301621944344ee708abddcef35e2658feabf7deeec559af2b704545a" exitCode=0 Oct 03 13:51:40 crc kubenswrapper[4861]: I1003 13:51:40.504480 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84b966f6c9-hxkbs" event={"ID":"72839de1-20d4-42dd-b913-3a8cbfffa95d","Type":"ContainerDied","Data":"28f655bb301621944344ee708abddcef35e2658feabf7deeec559af2b704545a"} Oct 03 13:51:40 crc kubenswrapper[4861]: I1003 13:51:40.516431 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6c8cb9d9fb-bt6ls" event={"ID":"81ec621b-cc30-4ab2-ae0e-bdd71629009f","Type":"ContainerStarted","Data":"e8f68d34958c2e026184791f46d176d6c566765e880f5ce4de79104495352445"} Oct 03 13:51:40 crc kubenswrapper[4861]: I1003 13:51:40.575952 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-84b966f6c9-hxkbs" Oct 03 13:51:40 crc kubenswrapper[4861]: I1003 13:51:40.672640 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 03 13:51:40 crc kubenswrapper[4861]: I1003 13:51:40.728780 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/72839de1-20d4-42dd-b913-3a8cbfffa95d-ovsdbserver-sb\") pod \"72839de1-20d4-42dd-b913-3a8cbfffa95d\" (UID: \"72839de1-20d4-42dd-b913-3a8cbfffa95d\") " Oct 03 13:51:40 crc kubenswrapper[4861]: I1003 13:51:40.728884 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/72839de1-20d4-42dd-b913-3a8cbfffa95d-dns-svc\") pod \"72839de1-20d4-42dd-b913-3a8cbfffa95d\" (UID: \"72839de1-20d4-42dd-b913-3a8cbfffa95d\") " Oct 03 13:51:40 crc kubenswrapper[4861]: I1003 13:51:40.728929 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wtvhl\" (UniqueName: \"kubernetes.io/projected/72839de1-20d4-42dd-b913-3a8cbfffa95d-kube-api-access-wtvhl\") pod \"72839de1-20d4-42dd-b913-3a8cbfffa95d\" (UID: \"72839de1-20d4-42dd-b913-3a8cbfffa95d\") " Oct 03 13:51:40 crc kubenswrapper[4861]: I1003 13:51:40.729041 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/72839de1-20d4-42dd-b913-3a8cbfffa95d-dns-swift-storage-0\") pod \"72839de1-20d4-42dd-b913-3a8cbfffa95d\" (UID: \"72839de1-20d4-42dd-b913-3a8cbfffa95d\") " Oct 03 13:51:40 crc kubenswrapper[4861]: I1003 13:51:40.729060 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/72839de1-20d4-42dd-b913-3a8cbfffa95d-config\") pod \"72839de1-20d4-42dd-b913-3a8cbfffa95d\" (UID: \"72839de1-20d4-42dd-b913-3a8cbfffa95d\") " Oct 03 13:51:40 crc kubenswrapper[4861]: I1003 13:51:40.729131 4861 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/72839de1-20d4-42dd-b913-3a8cbfffa95d-ovsdbserver-nb\") pod \"72839de1-20d4-42dd-b913-3a8cbfffa95d\" (UID: \"72839de1-20d4-42dd-b913-3a8cbfffa95d\") " Oct 03 13:51:40 crc kubenswrapper[4861]: I1003 13:51:40.743998 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/72839de1-20d4-42dd-b913-3a8cbfffa95d-kube-api-access-wtvhl" (OuterVolumeSpecName: "kube-api-access-wtvhl") pod "72839de1-20d4-42dd-b913-3a8cbfffa95d" (UID: "72839de1-20d4-42dd-b913-3a8cbfffa95d"). InnerVolumeSpecName "kube-api-access-wtvhl". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:51:40 crc kubenswrapper[4861]: I1003 13:51:40.775434 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-7c8f94f9d8-cqjv4" Oct 03 13:51:40 crc kubenswrapper[4861]: I1003 13:51:40.836018 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3cff73d0-7e13-497b-9c06-af6e078031c5-combined-ca-bundle\") pod \"3cff73d0-7e13-497b-9c06-af6e078031c5\" (UID: \"3cff73d0-7e13-497b-9c06-af6e078031c5\") " Oct 03 13:51:40 crc kubenswrapper[4861]: I1003 13:51:40.836195 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/3cff73d0-7e13-497b-9c06-af6e078031c5-config\") pod \"3cff73d0-7e13-497b-9c06-af6e078031c5\" (UID: \"3cff73d0-7e13-497b-9c06-af6e078031c5\") " Oct 03 13:51:40 crc kubenswrapper[4861]: I1003 13:51:40.836241 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dv2wc\" (UniqueName: \"kubernetes.io/projected/3cff73d0-7e13-497b-9c06-af6e078031c5-kube-api-access-dv2wc\") pod \"3cff73d0-7e13-497b-9c06-af6e078031c5\" (UID: \"3cff73d0-7e13-497b-9c06-af6e078031c5\") " Oct 03 13:51:40 crc kubenswrapper[4861]: I1003 13:51:40.836304 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/3cff73d0-7e13-497b-9c06-af6e078031c5-httpd-config\") pod \"3cff73d0-7e13-497b-9c06-af6e078031c5\" (UID: \"3cff73d0-7e13-497b-9c06-af6e078031c5\") " Oct 03 13:51:40 crc kubenswrapper[4861]: I1003 13:51:40.836331 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/3cff73d0-7e13-497b-9c06-af6e078031c5-ovndb-tls-certs\") pod \"3cff73d0-7e13-497b-9c06-af6e078031c5\" (UID: \"3cff73d0-7e13-497b-9c06-af6e078031c5\") " Oct 03 13:51:40 crc kubenswrapper[4861]: I1003 13:51:40.836669 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/72839de1-20d4-42dd-b913-3a8cbfffa95d-config" (OuterVolumeSpecName: "config") pod "72839de1-20d4-42dd-b913-3a8cbfffa95d" (UID: "72839de1-20d4-42dd-b913-3a8cbfffa95d"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:51:40 crc kubenswrapper[4861]: I1003 13:51:40.836744 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wtvhl\" (UniqueName: \"kubernetes.io/projected/72839de1-20d4-42dd-b913-3a8cbfffa95d-kube-api-access-wtvhl\") on node \"crc\" DevicePath \"\"" Oct 03 13:51:40 crc kubenswrapper[4861]: I1003 13:51:40.848976 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cff73d0-7e13-497b-9c06-af6e078031c5-kube-api-access-dv2wc" (OuterVolumeSpecName: "kube-api-access-dv2wc") pod "3cff73d0-7e13-497b-9c06-af6e078031c5" (UID: "3cff73d0-7e13-497b-9c06-af6e078031c5"). InnerVolumeSpecName "kube-api-access-dv2wc". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:51:40 crc kubenswrapper[4861]: I1003 13:51:40.851400 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3cff73d0-7e13-497b-9c06-af6e078031c5-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "3cff73d0-7e13-497b-9c06-af6e078031c5" (UID: "3cff73d0-7e13-497b-9c06-af6e078031c5"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:51:40 crc kubenswrapper[4861]: I1003 13:51:40.854607 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/72839de1-20d4-42dd-b913-3a8cbfffa95d-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "72839de1-20d4-42dd-b913-3a8cbfffa95d" (UID: "72839de1-20d4-42dd-b913-3a8cbfffa95d"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:51:40 crc kubenswrapper[4861]: I1003 13:51:40.863706 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/72839de1-20d4-42dd-b913-3a8cbfffa95d-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "72839de1-20d4-42dd-b913-3a8cbfffa95d" (UID: "72839de1-20d4-42dd-b913-3a8cbfffa95d"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:51:40 crc kubenswrapper[4861]: I1003 13:51:40.896464 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/72839de1-20d4-42dd-b913-3a8cbfffa95d-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "72839de1-20d4-42dd-b913-3a8cbfffa95d" (UID: "72839de1-20d4-42dd-b913-3a8cbfffa95d"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:51:40 crc kubenswrapper[4861]: I1003 13:51:40.918668 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/72839de1-20d4-42dd-b913-3a8cbfffa95d-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "72839de1-20d4-42dd-b913-3a8cbfffa95d" (UID: "72839de1-20d4-42dd-b913-3a8cbfffa95d"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:51:40 crc kubenswrapper[4861]: I1003 13:51:40.929367 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3cff73d0-7e13-497b-9c06-af6e078031c5-config" (OuterVolumeSpecName: "config") pod "3cff73d0-7e13-497b-9c06-af6e078031c5" (UID: "3cff73d0-7e13-497b-9c06-af6e078031c5"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:51:40 crc kubenswrapper[4861]: I1003 13:51:40.934378 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3cff73d0-7e13-497b-9c06-af6e078031c5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3cff73d0-7e13-497b-9c06-af6e078031c5" (UID: "3cff73d0-7e13-497b-9c06-af6e078031c5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:51:40 crc kubenswrapper[4861]: I1003 13:51:40.938348 4861 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/3cff73d0-7e13-497b-9c06-af6e078031c5-httpd-config\") on node \"crc\" DevicePath \"\"" Oct 03 13:51:40 crc kubenswrapper[4861]: I1003 13:51:40.938705 4861 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/72839de1-20d4-42dd-b913-3a8cbfffa95d-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 03 13:51:40 crc kubenswrapper[4861]: I1003 13:51:40.938793 4861 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3cff73d0-7e13-497b-9c06-af6e078031c5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 13:51:40 crc kubenswrapper[4861]: I1003 13:51:40.938892 4861 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/72839de1-20d4-42dd-b913-3a8cbfffa95d-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 03 13:51:40 crc kubenswrapper[4861]: I1003 13:51:40.938997 4861 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/72839de1-20d4-42dd-b913-3a8cbfffa95d-config\") on node \"crc\" DevicePath \"\"" Oct 03 13:51:40 crc kubenswrapper[4861]: I1003 13:51:40.939075 4861 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/72839de1-20d4-42dd-b913-3a8cbfffa95d-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 03 13:51:40 crc kubenswrapper[4861]: I1003 13:51:40.939157 4861 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/3cff73d0-7e13-497b-9c06-af6e078031c5-config\") on node \"crc\" DevicePath \"\"" Oct 03 13:51:40 crc kubenswrapper[4861]: I1003 13:51:40.939253 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dv2wc\" (UniqueName: \"kubernetes.io/projected/3cff73d0-7e13-497b-9c06-af6e078031c5-kube-api-access-dv2wc\") on node \"crc\" DevicePath \"\"" Oct 03 13:51:40 crc kubenswrapper[4861]: I1003 13:51:40.939334 4861 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/72839de1-20d4-42dd-b913-3a8cbfffa95d-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 03 13:51:40 crc kubenswrapper[4861]: I1003 13:51:40.976328 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3cff73d0-7e13-497b-9c06-af6e078031c5-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "3cff73d0-7e13-497b-9c06-af6e078031c5" (UID: "3cff73d0-7e13-497b-9c06-af6e078031c5"). InnerVolumeSpecName "ovndb-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:51:41 crc kubenswrapper[4861]: I1003 13:51:41.040779 4861 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/3cff73d0-7e13-497b-9c06-af6e078031c5-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 03 13:51:41 crc kubenswrapper[4861]: I1003 13:51:41.525642 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-84cdb7b9dd-jhc2h" event={"ID":"c589e11a-4953-46ec-aeff-a83f6557421f","Type":"ContainerStarted","Data":"65774e93f7a18a88876b7eea12fb7794958ba568a544b650edc95fff9801a980"} Oct 03 13:51:41 crc kubenswrapper[4861]: I1003 13:51:41.527983 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-7c8f94f9d8-cqjv4" Oct 03 13:51:41 crc kubenswrapper[4861]: I1003 13:51:41.528493 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7c8f94f9d8-cqjv4" event={"ID":"3cff73d0-7e13-497b-9c06-af6e078031c5","Type":"ContainerDied","Data":"53376044f3ca8f1d96214df61f233567d8b93250ca23d1c92ff2148e87c969f1"} Oct 03 13:51:41 crc kubenswrapper[4861]: I1003 13:51:41.528549 4861 scope.go:117] "RemoveContainer" containerID="c9faed708ce407ef8583884f185ee075b0285f8f584ab17b840c681d39035d0a" Oct 03 13:51:41 crc kubenswrapper[4861]: I1003 13:51:41.531565 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-84b966f6c9-hxkbs" Oct 03 13:51:41 crc kubenswrapper[4861]: I1003 13:51:41.541193 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84b966f6c9-hxkbs" event={"ID":"72839de1-20d4-42dd-b913-3a8cbfffa95d","Type":"ContainerDied","Data":"76f4bbd36736a1e68e230154d455bc55b13c66e3813720a09463ced34a030cb1"} Oct 03 13:51:41 crc kubenswrapper[4861]: I1003 13:51:41.544060 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="4b8c0315-b124-4017-a6d1-20d73208248d" containerName="cinder-scheduler" containerID="cri-o://9aef2af41dc1bdbdfa5e161c2b6e76b1d5d368aeb9dadcca2ba76ead9e17e16f" gracePeriod=30 Oct 03 13:51:41 crc kubenswrapper[4861]: I1003 13:51:41.544300 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="4b8c0315-b124-4017-a6d1-20d73208248d" containerName="probe" containerID="cri-o://46ce99e4a35ec0cd2ce076c1e80db7e775152bc602cc54080dad3b8fe802eafa" gracePeriod=30 Oct 03 13:51:41 crc kubenswrapper[4861]: I1003 13:51:41.594796 4861 scope.go:117] "RemoveContainer" containerID="f21b64e6bb91b5b17fea39e443b78ac6d0095c8f6bb85a5736dc5043efa19172" Oct 03 13:51:41 crc kubenswrapper[4861]: I1003 13:51:41.625186 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-84b966f6c9-hxkbs"] Oct 03 13:51:41 crc kubenswrapper[4861]: I1003 13:51:41.632504 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-84b966f6c9-hxkbs"] Oct 03 13:51:41 crc kubenswrapper[4861]: I1003 13:51:41.642408 4861 scope.go:117] "RemoveContainer" containerID="28f655bb301621944344ee708abddcef35e2658feabf7deeec559af2b704545a" Oct 03 13:51:41 crc kubenswrapper[4861]: I1003 13:51:41.642533 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-7c8f94f9d8-cqjv4"] Oct 03 13:51:41 crc kubenswrapper[4861]: I1003 13:51:41.648120 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-7c8f94f9d8-cqjv4"] Oct 03 13:51:41 crc kubenswrapper[4861]: 
I1003 13:51:41.676947 4861 scope.go:117] "RemoveContainer" containerID="cc1e2c047c4feaffd19251d0c40578b631487a89cb37915e37a13c4c87155fb7" Oct 03 13:51:42 crc kubenswrapper[4861]: I1003 13:51:42.545771 4861 generic.go:334] "Generic (PLEG): container finished" podID="4b8c0315-b124-4017-a6d1-20d73208248d" containerID="46ce99e4a35ec0cd2ce076c1e80db7e775152bc602cc54080dad3b8fe802eafa" exitCode=0 Oct 03 13:51:42 crc kubenswrapper[4861]: I1003 13:51:42.545848 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"4b8c0315-b124-4017-a6d1-20d73208248d","Type":"ContainerDied","Data":"46ce99e4a35ec0cd2ce076c1e80db7e775152bc602cc54080dad3b8fe802eafa"} Oct 03 13:51:42 crc kubenswrapper[4861]: I1003 13:51:42.706907 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cff73d0-7e13-497b-9c06-af6e078031c5" path="/var/lib/kubelet/pods/3cff73d0-7e13-497b-9c06-af6e078031c5/volumes" Oct 03 13:51:42 crc kubenswrapper[4861]: I1003 13:51:42.707623 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="72839de1-20d4-42dd-b913-3a8cbfffa95d" path="/var/lib/kubelet/pods/72839de1-20d4-42dd-b913-3a8cbfffa95d/volumes" Oct 03 13:51:43 crc kubenswrapper[4861]: I1003 13:51:43.343795 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Oct 03 13:51:43 crc kubenswrapper[4861]: I1003 13:51:43.557526 4861 generic.go:334] "Generic (PLEG): container finished" podID="4b8c0315-b124-4017-a6d1-20d73208248d" containerID="9aef2af41dc1bdbdfa5e161c2b6e76b1d5d368aeb9dadcca2ba76ead9e17e16f" exitCode=0 Oct 03 13:51:43 crc kubenswrapper[4861]: I1003 13:51:43.557575 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"4b8c0315-b124-4017-a6d1-20d73208248d","Type":"ContainerDied","Data":"9aef2af41dc1bdbdfa5e161c2b6e76b1d5d368aeb9dadcca2ba76ead9e17e16f"} Oct 03 13:51:44 crc kubenswrapper[4861]: I1003 13:51:44.075027 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 03 13:51:44 crc kubenswrapper[4861]: I1003 13:51:44.205798 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6dzw4\" (UniqueName: \"kubernetes.io/projected/4b8c0315-b124-4017-a6d1-20d73208248d-kube-api-access-6dzw4\") pod \"4b8c0315-b124-4017-a6d1-20d73208248d\" (UID: \"4b8c0315-b124-4017-a6d1-20d73208248d\") " Oct 03 13:51:44 crc kubenswrapper[4861]: I1003 13:51:44.206111 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4b8c0315-b124-4017-a6d1-20d73208248d-combined-ca-bundle\") pod \"4b8c0315-b124-4017-a6d1-20d73208248d\" (UID: \"4b8c0315-b124-4017-a6d1-20d73208248d\") " Oct 03 13:51:44 crc kubenswrapper[4861]: I1003 13:51:44.206255 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4b8c0315-b124-4017-a6d1-20d73208248d-scripts\") pod \"4b8c0315-b124-4017-a6d1-20d73208248d\" (UID: \"4b8c0315-b124-4017-a6d1-20d73208248d\") " Oct 03 13:51:44 crc kubenswrapper[4861]: I1003 13:51:44.206287 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4b8c0315-b124-4017-a6d1-20d73208248d-config-data\") pod \"4b8c0315-b124-4017-a6d1-20d73208248d\" (UID: \"4b8c0315-b124-4017-a6d1-20d73208248d\") " Oct 03 13:51:44 crc kubenswrapper[4861]: I1003 13:51:44.207046 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/4b8c0315-b124-4017-a6d1-20d73208248d-etc-machine-id\") pod \"4b8c0315-b124-4017-a6d1-20d73208248d\" (UID: \"4b8c0315-b124-4017-a6d1-20d73208248d\") " Oct 03 13:51:44 crc kubenswrapper[4861]: I1003 13:51:44.207081 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4b8c0315-b124-4017-a6d1-20d73208248d-config-data-custom\") pod \"4b8c0315-b124-4017-a6d1-20d73208248d\" (UID: \"4b8c0315-b124-4017-a6d1-20d73208248d\") " Oct 03 13:51:44 crc kubenswrapper[4861]: I1003 13:51:44.207130 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4b8c0315-b124-4017-a6d1-20d73208248d-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "4b8c0315-b124-4017-a6d1-20d73208248d" (UID: "4b8c0315-b124-4017-a6d1-20d73208248d"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 03 13:51:44 crc kubenswrapper[4861]: I1003 13:51:44.207709 4861 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/4b8c0315-b124-4017-a6d1-20d73208248d-etc-machine-id\") on node \"crc\" DevicePath \"\"" Oct 03 13:51:44 crc kubenswrapper[4861]: I1003 13:51:44.212546 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4b8c0315-b124-4017-a6d1-20d73208248d-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "4b8c0315-b124-4017-a6d1-20d73208248d" (UID: "4b8c0315-b124-4017-a6d1-20d73208248d"). InnerVolumeSpecName "config-data-custom". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:51:44 crc kubenswrapper[4861]: I1003 13:51:44.221338 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4b8c0315-b124-4017-a6d1-20d73208248d-scripts" (OuterVolumeSpecName: "scripts") pod "4b8c0315-b124-4017-a6d1-20d73208248d" (UID: "4b8c0315-b124-4017-a6d1-20d73208248d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:51:44 crc kubenswrapper[4861]: I1003 13:51:44.223287 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4b8c0315-b124-4017-a6d1-20d73208248d-kube-api-access-6dzw4" (OuterVolumeSpecName: "kube-api-access-6dzw4") pod "4b8c0315-b124-4017-a6d1-20d73208248d" (UID: "4b8c0315-b124-4017-a6d1-20d73208248d"). InnerVolumeSpecName "kube-api-access-6dzw4". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:51:44 crc kubenswrapper[4861]: I1003 13:51:44.303610 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4b8c0315-b124-4017-a6d1-20d73208248d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4b8c0315-b124-4017-a6d1-20d73208248d" (UID: "4b8c0315-b124-4017-a6d1-20d73208248d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:51:44 crc kubenswrapper[4861]: I1003 13:51:44.310201 4861 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4b8c0315-b124-4017-a6d1-20d73208248d-config-data-custom\") on node \"crc\" DevicePath \"\"" Oct 03 13:51:44 crc kubenswrapper[4861]: I1003 13:51:44.310264 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6dzw4\" (UniqueName: \"kubernetes.io/projected/4b8c0315-b124-4017-a6d1-20d73208248d-kube-api-access-6dzw4\") on node \"crc\" DevicePath \"\"" Oct 03 13:51:44 crc kubenswrapper[4861]: I1003 13:51:44.310274 4861 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4b8c0315-b124-4017-a6d1-20d73208248d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 13:51:44 crc kubenswrapper[4861]: I1003 13:51:44.310282 4861 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4b8c0315-b124-4017-a6d1-20d73208248d-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 13:51:44 crc kubenswrapper[4861]: I1003 13:51:44.383312 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4b8c0315-b124-4017-a6d1-20d73208248d-config-data" (OuterVolumeSpecName: "config-data") pod "4b8c0315-b124-4017-a6d1-20d73208248d" (UID: "4b8c0315-b124-4017-a6d1-20d73208248d"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:51:44 crc kubenswrapper[4861]: I1003 13:51:44.412439 4861 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4b8c0315-b124-4017-a6d1-20d73208248d-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 13:51:44 crc kubenswrapper[4861]: I1003 13:51:44.568871 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"4b8c0315-b124-4017-a6d1-20d73208248d","Type":"ContainerDied","Data":"b2d61d90d524903848cd7d1ac39c4409b1e795836f81508477eb12274869faf9"} Oct 03 13:51:44 crc kubenswrapper[4861]: I1003 13:51:44.568923 4861 scope.go:117] "RemoveContainer" containerID="46ce99e4a35ec0cd2ce076c1e80db7e775152bc602cc54080dad3b8fe802eafa" Oct 03 13:51:44 crc kubenswrapper[4861]: I1003 13:51:44.568960 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 03 13:51:44 crc kubenswrapper[4861]: I1003 13:51:44.594728 4861 scope.go:117] "RemoveContainer" containerID="9aef2af41dc1bdbdfa5e161c2b6e76b1d5d368aeb9dadcca2ba76ead9e17e16f" Oct 03 13:51:44 crc kubenswrapper[4861]: I1003 13:51:44.615020 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 03 13:51:44 crc kubenswrapper[4861]: I1003 13:51:44.628040 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 03 13:51:44 crc kubenswrapper[4861]: I1003 13:51:44.640509 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Oct 03 13:51:44 crc kubenswrapper[4861]: E1003 13:51:44.641069 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4b8c0315-b124-4017-a6d1-20d73208248d" containerName="cinder-scheduler" Oct 03 13:51:44 crc kubenswrapper[4861]: I1003 13:51:44.641099 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="4b8c0315-b124-4017-a6d1-20d73208248d" containerName="cinder-scheduler" Oct 03 13:51:44 crc kubenswrapper[4861]: E1003 13:51:44.641121 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="72839de1-20d4-42dd-b913-3a8cbfffa95d" containerName="init" Oct 03 13:51:44 crc kubenswrapper[4861]: I1003 13:51:44.641129 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="72839de1-20d4-42dd-b913-3a8cbfffa95d" containerName="init" Oct 03 13:51:44 crc kubenswrapper[4861]: E1003 13:51:44.641160 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3cff73d0-7e13-497b-9c06-af6e078031c5" containerName="neutron-httpd" Oct 03 13:51:44 crc kubenswrapper[4861]: I1003 13:51:44.641170 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="3cff73d0-7e13-497b-9c06-af6e078031c5" containerName="neutron-httpd" Oct 03 13:51:44 crc kubenswrapper[4861]: E1003 13:51:44.641187 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4b8c0315-b124-4017-a6d1-20d73208248d" containerName="probe" Oct 03 13:51:44 crc kubenswrapper[4861]: I1003 13:51:44.641196 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="4b8c0315-b124-4017-a6d1-20d73208248d" containerName="probe" Oct 03 13:51:44 crc kubenswrapper[4861]: E1003 13:51:44.641212 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3cff73d0-7e13-497b-9c06-af6e078031c5" containerName="neutron-api" Oct 03 13:51:44 crc kubenswrapper[4861]: I1003 13:51:44.641220 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="3cff73d0-7e13-497b-9c06-af6e078031c5" containerName="neutron-api" Oct 03 
13:51:44 crc kubenswrapper[4861]: E1003 13:51:44.641247 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="72839de1-20d4-42dd-b913-3a8cbfffa95d" containerName="dnsmasq-dns" Oct 03 13:51:44 crc kubenswrapper[4861]: I1003 13:51:44.641256 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="72839de1-20d4-42dd-b913-3a8cbfffa95d" containerName="dnsmasq-dns" Oct 03 13:51:44 crc kubenswrapper[4861]: I1003 13:51:44.641445 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="3cff73d0-7e13-497b-9c06-af6e078031c5" containerName="neutron-api" Oct 03 13:51:44 crc kubenswrapper[4861]: I1003 13:51:44.641469 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="4b8c0315-b124-4017-a6d1-20d73208248d" containerName="probe" Oct 03 13:51:44 crc kubenswrapper[4861]: I1003 13:51:44.641490 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="72839de1-20d4-42dd-b913-3a8cbfffa95d" containerName="dnsmasq-dns" Oct 03 13:51:44 crc kubenswrapper[4861]: I1003 13:51:44.641500 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="4b8c0315-b124-4017-a6d1-20d73208248d" containerName="cinder-scheduler" Oct 03 13:51:44 crc kubenswrapper[4861]: I1003 13:51:44.641514 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="3cff73d0-7e13-497b-9c06-af6e078031c5" containerName="neutron-httpd" Oct 03 13:51:44 crc kubenswrapper[4861]: I1003 13:51:44.642752 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 03 13:51:44 crc kubenswrapper[4861]: I1003 13:51:44.646344 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Oct 03 13:51:44 crc kubenswrapper[4861]: I1003 13:51:44.706386 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4b8c0315-b124-4017-a6d1-20d73208248d" path="/var/lib/kubelet/pods/4b8c0315-b124-4017-a6d1-20d73208248d/volumes" Oct 03 13:51:44 crc kubenswrapper[4861]: I1003 13:51:44.707108 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 03 13:51:44 crc kubenswrapper[4861]: I1003 13:51:44.717632 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rltvh\" (UniqueName: \"kubernetes.io/projected/8af097a4-c83f-4687-9804-fded6b1eb9ac-kube-api-access-rltvh\") pod \"cinder-scheduler-0\" (UID: \"8af097a4-c83f-4687-9804-fded6b1eb9ac\") " pod="openstack/cinder-scheduler-0" Oct 03 13:51:44 crc kubenswrapper[4861]: I1003 13:51:44.717667 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8af097a4-c83f-4687-9804-fded6b1eb9ac-scripts\") pod \"cinder-scheduler-0\" (UID: \"8af097a4-c83f-4687-9804-fded6b1eb9ac\") " pod="openstack/cinder-scheduler-0" Oct 03 13:51:44 crc kubenswrapper[4861]: I1003 13:51:44.717687 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8af097a4-c83f-4687-9804-fded6b1eb9ac-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"8af097a4-c83f-4687-9804-fded6b1eb9ac\") " pod="openstack/cinder-scheduler-0" Oct 03 13:51:44 crc kubenswrapper[4861]: I1003 13:51:44.717721 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: 
\"kubernetes.io/host-path/8af097a4-c83f-4687-9804-fded6b1eb9ac-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"8af097a4-c83f-4687-9804-fded6b1eb9ac\") " pod="openstack/cinder-scheduler-0" Oct 03 13:51:44 crc kubenswrapper[4861]: I1003 13:51:44.717777 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8af097a4-c83f-4687-9804-fded6b1eb9ac-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"8af097a4-c83f-4687-9804-fded6b1eb9ac\") " pod="openstack/cinder-scheduler-0" Oct 03 13:51:44 crc kubenswrapper[4861]: I1003 13:51:44.717844 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8af097a4-c83f-4687-9804-fded6b1eb9ac-config-data\") pod \"cinder-scheduler-0\" (UID: \"8af097a4-c83f-4687-9804-fded6b1eb9ac\") " pod="openstack/cinder-scheduler-0" Oct 03 13:51:44 crc kubenswrapper[4861]: I1003 13:51:44.819024 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8af097a4-c83f-4687-9804-fded6b1eb9ac-config-data\") pod \"cinder-scheduler-0\" (UID: \"8af097a4-c83f-4687-9804-fded6b1eb9ac\") " pod="openstack/cinder-scheduler-0" Oct 03 13:51:44 crc kubenswrapper[4861]: I1003 13:51:44.819080 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rltvh\" (UniqueName: \"kubernetes.io/projected/8af097a4-c83f-4687-9804-fded6b1eb9ac-kube-api-access-rltvh\") pod \"cinder-scheduler-0\" (UID: \"8af097a4-c83f-4687-9804-fded6b1eb9ac\") " pod="openstack/cinder-scheduler-0" Oct 03 13:51:44 crc kubenswrapper[4861]: I1003 13:51:44.819104 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8af097a4-c83f-4687-9804-fded6b1eb9ac-scripts\") pod \"cinder-scheduler-0\" (UID: \"8af097a4-c83f-4687-9804-fded6b1eb9ac\") " pod="openstack/cinder-scheduler-0" Oct 03 13:51:44 crc kubenswrapper[4861]: I1003 13:51:44.819131 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8af097a4-c83f-4687-9804-fded6b1eb9ac-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"8af097a4-c83f-4687-9804-fded6b1eb9ac\") " pod="openstack/cinder-scheduler-0" Oct 03 13:51:44 crc kubenswrapper[4861]: I1003 13:51:44.819191 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8af097a4-c83f-4687-9804-fded6b1eb9ac-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"8af097a4-c83f-4687-9804-fded6b1eb9ac\") " pod="openstack/cinder-scheduler-0" Oct 03 13:51:44 crc kubenswrapper[4861]: I1003 13:51:44.819264 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8af097a4-c83f-4687-9804-fded6b1eb9ac-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"8af097a4-c83f-4687-9804-fded6b1eb9ac\") " pod="openstack/cinder-scheduler-0" Oct 03 13:51:44 crc kubenswrapper[4861]: I1003 13:51:44.820378 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8af097a4-c83f-4687-9804-fded6b1eb9ac-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"8af097a4-c83f-4687-9804-fded6b1eb9ac\") " pod="openstack/cinder-scheduler-0" Oct 03 13:51:44 
crc kubenswrapper[4861]: I1003 13:51:44.823703 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8af097a4-c83f-4687-9804-fded6b1eb9ac-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"8af097a4-c83f-4687-9804-fded6b1eb9ac\") " pod="openstack/cinder-scheduler-0" Oct 03 13:51:44 crc kubenswrapper[4861]: I1003 13:51:44.830715 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8af097a4-c83f-4687-9804-fded6b1eb9ac-scripts\") pod \"cinder-scheduler-0\" (UID: \"8af097a4-c83f-4687-9804-fded6b1eb9ac\") " pod="openstack/cinder-scheduler-0" Oct 03 13:51:44 crc kubenswrapper[4861]: I1003 13:51:44.832405 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8af097a4-c83f-4687-9804-fded6b1eb9ac-config-data\") pod \"cinder-scheduler-0\" (UID: \"8af097a4-c83f-4687-9804-fded6b1eb9ac\") " pod="openstack/cinder-scheduler-0" Oct 03 13:51:44 crc kubenswrapper[4861]: I1003 13:51:44.841378 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8af097a4-c83f-4687-9804-fded6b1eb9ac-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"8af097a4-c83f-4687-9804-fded6b1eb9ac\") " pod="openstack/cinder-scheduler-0" Oct 03 13:51:44 crc kubenswrapper[4861]: I1003 13:51:44.846202 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rltvh\" (UniqueName: \"kubernetes.io/projected/8af097a4-c83f-4687-9804-fded6b1eb9ac-kube-api-access-rltvh\") pod \"cinder-scheduler-0\" (UID: \"8af097a4-c83f-4687-9804-fded6b1eb9ac\") " pod="openstack/cinder-scheduler-0" Oct 03 13:51:45 crc kubenswrapper[4861]: I1003 13:51:45.017388 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 03 13:51:45 crc kubenswrapper[4861]: I1003 13:51:45.338194 4861 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-84b966f6c9-hxkbs" podUID="72839de1-20d4-42dd-b913-3a8cbfffa95d" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.149:5353: i/o timeout" Oct 03 13:51:45 crc kubenswrapper[4861]: I1003 13:51:45.646547 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 03 13:51:45 crc kubenswrapper[4861]: I1003 13:51:45.804849 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-64fc59b774-2zcpl" Oct 03 13:51:46 crc kubenswrapper[4861]: I1003 13:51:46.323331 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-64fc59b774-2zcpl" Oct 03 13:51:46 crc kubenswrapper[4861]: I1003 13:51:46.414223 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-6cc4d4775d-sr7pz"] Oct 03 13:51:46 crc kubenswrapper[4861]: I1003 13:51:46.414464 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-6cc4d4775d-sr7pz" podUID="9ab65d2f-a6e3-477c-81cf-d721fe6eaa9d" containerName="barbican-api-log" containerID="cri-o://31ca449731603a214dbde82a1583c8a6b51464a5072f75455b995c003de343ed" gracePeriod=30 Oct 03 13:51:46 crc kubenswrapper[4861]: I1003 13:51:46.414605 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-6cc4d4775d-sr7pz" podUID="9ab65d2f-a6e3-477c-81cf-d721fe6eaa9d" containerName="barbican-api" containerID="cri-o://4a970d5f9d8784fca47a105003fe86d2ce572fa6ad9d5016f66f9cf1aa78427d" gracePeriod=30 Oct 03 13:51:46 crc kubenswrapper[4861]: I1003 13:51:46.653188 4861 generic.go:334] "Generic (PLEG): container finished" podID="9ab65d2f-a6e3-477c-81cf-d721fe6eaa9d" containerID="31ca449731603a214dbde82a1583c8a6b51464a5072f75455b995c003de343ed" exitCode=143 Oct 03 13:51:46 crc kubenswrapper[4861]: I1003 13:51:46.653383 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6cc4d4775d-sr7pz" event={"ID":"9ab65d2f-a6e3-477c-81cf-d721fe6eaa9d","Type":"ContainerDied","Data":"31ca449731603a214dbde82a1583c8a6b51464a5072f75455b995c003de343ed"} Oct 03 13:51:46 crc kubenswrapper[4861]: I1003 13:51:46.664750 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"8af097a4-c83f-4687-9804-fded6b1eb9ac","Type":"ContainerStarted","Data":"d067411a0516fb610eaf0c52099ea9c4facaf5859aa50e7e84accf83a90fb2f0"} Oct 03 13:51:46 crc kubenswrapper[4861]: I1003 13:51:46.664800 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"8af097a4-c83f-4687-9804-fded6b1eb9ac","Type":"ContainerStarted","Data":"897d352523c495b5df7aee157923341f2e618689b4cabf8b1af4ed99e3b3e03c"} Oct 03 13:51:47 crc kubenswrapper[4861]: I1003 13:51:47.674095 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"8af097a4-c83f-4687-9804-fded6b1eb9ac","Type":"ContainerStarted","Data":"6282c047e65775280fee07f94a6e35776bac8e5aa38af748ec1bc047cf7cf63d"} Oct 03 13:51:47 crc kubenswrapper[4861]: I1003 13:51:47.702817 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=3.702794421 podStartE2EDuration="3.702794421s" podCreationTimestamp="2025-10-03 13:51:44 +0000 
UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:51:47.70053526 +0000 UTC m=+1221.698520307" watchObservedRunningTime="2025-10-03 13:51:47.702794421 +0000 UTC m=+1221.700779468" Oct 03 13:51:48 crc kubenswrapper[4861]: I1003 13:51:48.594448 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-84cdb7b9dd-jhc2h" Oct 03 13:51:48 crc kubenswrapper[4861]: I1003 13:51:48.594695 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-84cdb7b9dd-jhc2h" Oct 03 13:51:48 crc kubenswrapper[4861]: I1003 13:51:48.719527 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-6c8cb9d9fb-bt6ls" Oct 03 13:51:48 crc kubenswrapper[4861]: I1003 13:51:48.720354 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-6c8cb9d9fb-bt6ls" Oct 03 13:51:48 crc kubenswrapper[4861]: I1003 13:51:48.957737 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-c6f6cb4f6-xc49q" Oct 03 13:51:48 crc kubenswrapper[4861]: I1003 13:51:48.970333 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-c6f6cb4f6-xc49q" Oct 03 13:51:49 crc kubenswrapper[4861]: I1003 13:51:49.691875 4861 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-6cc4d4775d-sr7pz" podUID="9ab65d2f-a6e3-477c-81cf-d721fe6eaa9d" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.159:9311/healthcheck\": read tcp 10.217.0.2:59294->10.217.0.159:9311: read: connection reset by peer" Oct 03 13:51:49 crc kubenswrapper[4861]: I1003 13:51:49.691984 4861 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-6cc4d4775d-sr7pz" podUID="9ab65d2f-a6e3-477c-81cf-d721fe6eaa9d" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.159:9311/healthcheck\": read tcp 10.217.0.2:59284->10.217.0.159:9311: read: connection reset by peer" Oct 03 13:51:49 crc kubenswrapper[4861]: I1003 13:51:49.886486 4861 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/cinder-api-0" podUID="8bbb0cd4-37cc-4039-a13f-55dbeb56c336" containerName="cinder-api" probeResult="failure" output="Get \"http://10.217.0.162:8776/healthcheck\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 03 13:51:50 crc kubenswrapper[4861]: I1003 13:51:50.018202 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Oct 03 13:51:50 crc kubenswrapper[4861]: I1003 13:51:50.054200 4861 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="3413d18c-4d35-4962-bc3f-b6750860b13d" containerName="proxy-httpd" probeResult="failure" output="HTTP probe failed with statuscode: 503" Oct 03 13:51:50 crc kubenswrapper[4861]: I1003 13:51:50.194534 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-6cc4d4775d-sr7pz" Oct 03 13:51:50 crc kubenswrapper[4861]: I1003 13:51:50.336168 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gcc9v\" (UniqueName: \"kubernetes.io/projected/9ab65d2f-a6e3-477c-81cf-d721fe6eaa9d-kube-api-access-gcc9v\") pod \"9ab65d2f-a6e3-477c-81cf-d721fe6eaa9d\" (UID: \"9ab65d2f-a6e3-477c-81cf-d721fe6eaa9d\") " Oct 03 13:51:50 crc kubenswrapper[4861]: I1003 13:51:50.336267 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9ab65d2f-a6e3-477c-81cf-d721fe6eaa9d-logs\") pod \"9ab65d2f-a6e3-477c-81cf-d721fe6eaa9d\" (UID: \"9ab65d2f-a6e3-477c-81cf-d721fe6eaa9d\") " Oct 03 13:51:50 crc kubenswrapper[4861]: I1003 13:51:50.336366 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9ab65d2f-a6e3-477c-81cf-d721fe6eaa9d-config-data-custom\") pod \"9ab65d2f-a6e3-477c-81cf-d721fe6eaa9d\" (UID: \"9ab65d2f-a6e3-477c-81cf-d721fe6eaa9d\") " Oct 03 13:51:50 crc kubenswrapper[4861]: I1003 13:51:50.336445 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9ab65d2f-a6e3-477c-81cf-d721fe6eaa9d-config-data\") pod \"9ab65d2f-a6e3-477c-81cf-d721fe6eaa9d\" (UID: \"9ab65d2f-a6e3-477c-81cf-d721fe6eaa9d\") " Oct 03 13:51:50 crc kubenswrapper[4861]: I1003 13:51:50.336488 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9ab65d2f-a6e3-477c-81cf-d721fe6eaa9d-combined-ca-bundle\") pod \"9ab65d2f-a6e3-477c-81cf-d721fe6eaa9d\" (UID: \"9ab65d2f-a6e3-477c-81cf-d721fe6eaa9d\") " Oct 03 13:51:50 crc kubenswrapper[4861]: I1003 13:51:50.338352 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9ab65d2f-a6e3-477c-81cf-d721fe6eaa9d-logs" (OuterVolumeSpecName: "logs") pod "9ab65d2f-a6e3-477c-81cf-d721fe6eaa9d" (UID: "9ab65d2f-a6e3-477c-81cf-d721fe6eaa9d"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:51:50 crc kubenswrapper[4861]: I1003 13:51:50.344422 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9ab65d2f-a6e3-477c-81cf-d721fe6eaa9d-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "9ab65d2f-a6e3-477c-81cf-d721fe6eaa9d" (UID: "9ab65d2f-a6e3-477c-81cf-d721fe6eaa9d"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:51:50 crc kubenswrapper[4861]: I1003 13:51:50.358694 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9ab65d2f-a6e3-477c-81cf-d721fe6eaa9d-kube-api-access-gcc9v" (OuterVolumeSpecName: "kube-api-access-gcc9v") pod "9ab65d2f-a6e3-477c-81cf-d721fe6eaa9d" (UID: "9ab65d2f-a6e3-477c-81cf-d721fe6eaa9d"). InnerVolumeSpecName "kube-api-access-gcc9v". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:51:50 crc kubenswrapper[4861]: I1003 13:51:50.388483 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9ab65d2f-a6e3-477c-81cf-d721fe6eaa9d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9ab65d2f-a6e3-477c-81cf-d721fe6eaa9d" (UID: "9ab65d2f-a6e3-477c-81cf-d721fe6eaa9d"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:51:50 crc kubenswrapper[4861]: I1003 13:51:50.411523 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9ab65d2f-a6e3-477c-81cf-d721fe6eaa9d-config-data" (OuterVolumeSpecName: "config-data") pod "9ab65d2f-a6e3-477c-81cf-d721fe6eaa9d" (UID: "9ab65d2f-a6e3-477c-81cf-d721fe6eaa9d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:51:50 crc kubenswrapper[4861]: I1003 13:51:50.439058 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gcc9v\" (UniqueName: \"kubernetes.io/projected/9ab65d2f-a6e3-477c-81cf-d721fe6eaa9d-kube-api-access-gcc9v\") on node \"crc\" DevicePath \"\"" Oct 03 13:51:50 crc kubenswrapper[4861]: I1003 13:51:50.439197 4861 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9ab65d2f-a6e3-477c-81cf-d721fe6eaa9d-logs\") on node \"crc\" DevicePath \"\"" Oct 03 13:51:50 crc kubenswrapper[4861]: I1003 13:51:50.439330 4861 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9ab65d2f-a6e3-477c-81cf-d721fe6eaa9d-config-data-custom\") on node \"crc\" DevicePath \"\"" Oct 03 13:51:50 crc kubenswrapper[4861]: I1003 13:51:50.439435 4861 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9ab65d2f-a6e3-477c-81cf-d721fe6eaa9d-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 13:51:50 crc kubenswrapper[4861]: I1003 13:51:50.439538 4861 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9ab65d2f-a6e3-477c-81cf-d721fe6eaa9d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 13:51:50 crc kubenswrapper[4861]: I1003 13:51:50.704743 4861 generic.go:334] "Generic (PLEG): container finished" podID="9ab65d2f-a6e3-477c-81cf-d721fe6eaa9d" containerID="4a970d5f9d8784fca47a105003fe86d2ce572fa6ad9d5016f66f9cf1aa78427d" exitCode=0 Oct 03 13:51:50 crc kubenswrapper[4861]: I1003 13:51:50.704790 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6cc4d4775d-sr7pz" event={"ID":"9ab65d2f-a6e3-477c-81cf-d721fe6eaa9d","Type":"ContainerDied","Data":"4a970d5f9d8784fca47a105003fe86d2ce572fa6ad9d5016f66f9cf1aa78427d"} Oct 03 13:51:50 crc kubenswrapper[4861]: I1003 13:51:50.704824 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6cc4d4775d-sr7pz" event={"ID":"9ab65d2f-a6e3-477c-81cf-d721fe6eaa9d","Type":"ContainerDied","Data":"3112e3f43b3fd5bf0553e579347eb0905f1a51671daa2702bda4151e27209d59"} Oct 03 13:51:50 crc kubenswrapper[4861]: I1003 13:51:50.704850 4861 scope.go:117] "RemoveContainer" containerID="4a970d5f9d8784fca47a105003fe86d2ce572fa6ad9d5016f66f9cf1aa78427d" Oct 03 13:51:50 crc kubenswrapper[4861]: I1003 13:51:50.705008 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-6cc4d4775d-sr7pz" Oct 03 13:51:50 crc kubenswrapper[4861]: I1003 13:51:50.735759 4861 scope.go:117] "RemoveContainer" containerID="31ca449731603a214dbde82a1583c8a6b51464a5072f75455b995c003de343ed" Oct 03 13:51:50 crc kubenswrapper[4861]: I1003 13:51:50.740697 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-6cc4d4775d-sr7pz"] Oct 03 13:51:50 crc kubenswrapper[4861]: I1003 13:51:50.751507 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-6cc4d4775d-sr7pz"] Oct 03 13:51:50 crc kubenswrapper[4861]: I1003 13:51:50.774716 4861 scope.go:117] "RemoveContainer" containerID="4a970d5f9d8784fca47a105003fe86d2ce572fa6ad9d5016f66f9cf1aa78427d" Oct 03 13:51:50 crc kubenswrapper[4861]: E1003 13:51:50.775277 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4a970d5f9d8784fca47a105003fe86d2ce572fa6ad9d5016f66f9cf1aa78427d\": container with ID starting with 4a970d5f9d8784fca47a105003fe86d2ce572fa6ad9d5016f66f9cf1aa78427d not found: ID does not exist" containerID="4a970d5f9d8784fca47a105003fe86d2ce572fa6ad9d5016f66f9cf1aa78427d" Oct 03 13:51:50 crc kubenswrapper[4861]: I1003 13:51:50.775405 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4a970d5f9d8784fca47a105003fe86d2ce572fa6ad9d5016f66f9cf1aa78427d"} err="failed to get container status \"4a970d5f9d8784fca47a105003fe86d2ce572fa6ad9d5016f66f9cf1aa78427d\": rpc error: code = NotFound desc = could not find container \"4a970d5f9d8784fca47a105003fe86d2ce572fa6ad9d5016f66f9cf1aa78427d\": container with ID starting with 4a970d5f9d8784fca47a105003fe86d2ce572fa6ad9d5016f66f9cf1aa78427d not found: ID does not exist" Oct 03 13:51:50 crc kubenswrapper[4861]: I1003 13:51:50.775505 4861 scope.go:117] "RemoveContainer" containerID="31ca449731603a214dbde82a1583c8a6b51464a5072f75455b995c003de343ed" Oct 03 13:51:50 crc kubenswrapper[4861]: E1003 13:51:50.775823 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"31ca449731603a214dbde82a1583c8a6b51464a5072f75455b995c003de343ed\": container with ID starting with 31ca449731603a214dbde82a1583c8a6b51464a5072f75455b995c003de343ed not found: ID does not exist" containerID="31ca449731603a214dbde82a1583c8a6b51464a5072f75455b995c003de343ed" Oct 03 13:51:50 crc kubenswrapper[4861]: I1003 13:51:50.775860 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"31ca449731603a214dbde82a1583c8a6b51464a5072f75455b995c003de343ed"} err="failed to get container status \"31ca449731603a214dbde82a1583c8a6b51464a5072f75455b995c003de343ed\": rpc error: code = NotFound desc = could not find container \"31ca449731603a214dbde82a1583c8a6b51464a5072f75455b995c003de343ed\": container with ID starting with 31ca449731603a214dbde82a1583c8a6b51464a5072f75455b995c003de343ed not found: ID does not exist" Oct 03 13:51:50 crc kubenswrapper[4861]: I1003 13:51:50.951094 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-d84968f5f-dw9pq" Oct 03 13:51:52 crc kubenswrapper[4861]: I1003 13:51:52.191942 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Oct 03 13:51:52 crc kubenswrapper[4861]: E1003 13:51:52.192656 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9ab65d2f-a6e3-477c-81cf-d721fe6eaa9d" 
containerName="barbican-api-log" Oct 03 13:51:52 crc kubenswrapper[4861]: I1003 13:51:52.192670 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="9ab65d2f-a6e3-477c-81cf-d721fe6eaa9d" containerName="barbican-api-log" Oct 03 13:51:52 crc kubenswrapper[4861]: E1003 13:51:52.192690 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9ab65d2f-a6e3-477c-81cf-d721fe6eaa9d" containerName="barbican-api" Oct 03 13:51:52 crc kubenswrapper[4861]: I1003 13:51:52.192696 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="9ab65d2f-a6e3-477c-81cf-d721fe6eaa9d" containerName="barbican-api" Oct 03 13:51:52 crc kubenswrapper[4861]: I1003 13:51:52.192904 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="9ab65d2f-a6e3-477c-81cf-d721fe6eaa9d" containerName="barbican-api" Oct 03 13:51:52 crc kubenswrapper[4861]: I1003 13:51:52.192925 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="9ab65d2f-a6e3-477c-81cf-d721fe6eaa9d" containerName="barbican-api-log" Oct 03 13:51:52 crc kubenswrapper[4861]: I1003 13:51:52.193520 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Oct 03 13:51:52 crc kubenswrapper[4861]: I1003 13:51:52.198645 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret" Oct 03 13:51:52 crc kubenswrapper[4861]: I1003 13:51:52.198709 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config" Oct 03 13:51:52 crc kubenswrapper[4861]: I1003 13:51:52.199082 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-45wq8" Oct 03 13:51:52 crc kubenswrapper[4861]: I1003 13:51:52.208790 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Oct 03 13:51:52 crc kubenswrapper[4861]: I1003 13:51:52.275831 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/08030cdc-19c1-46f6-940c-48d493ce3880-combined-ca-bundle\") pod \"openstackclient\" (UID: \"08030cdc-19c1-46f6-940c-48d493ce3880\") " pod="openstack/openstackclient" Oct 03 13:51:52 crc kubenswrapper[4861]: I1003 13:51:52.275991 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/08030cdc-19c1-46f6-940c-48d493ce3880-openstack-config-secret\") pod \"openstackclient\" (UID: \"08030cdc-19c1-46f6-940c-48d493ce3880\") " pod="openstack/openstackclient" Oct 03 13:51:52 crc kubenswrapper[4861]: I1003 13:51:52.276147 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xb42s\" (UniqueName: \"kubernetes.io/projected/08030cdc-19c1-46f6-940c-48d493ce3880-kube-api-access-xb42s\") pod \"openstackclient\" (UID: \"08030cdc-19c1-46f6-940c-48d493ce3880\") " pod="openstack/openstackclient" Oct 03 13:51:52 crc kubenswrapper[4861]: I1003 13:51:52.276190 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/08030cdc-19c1-46f6-940c-48d493ce3880-openstack-config\") pod \"openstackclient\" (UID: \"08030cdc-19c1-46f6-940c-48d493ce3880\") " pod="openstack/openstackclient" Oct 03 13:51:52 crc kubenswrapper[4861]: I1003 13:51:52.377457 4861 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/08030cdc-19c1-46f6-940c-48d493ce3880-openstack-config\") pod \"openstackclient\" (UID: \"08030cdc-19c1-46f6-940c-48d493ce3880\") " pod="openstack/openstackclient" Oct 03 13:51:52 crc kubenswrapper[4861]: I1003 13:51:52.377546 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/08030cdc-19c1-46f6-940c-48d493ce3880-combined-ca-bundle\") pod \"openstackclient\" (UID: \"08030cdc-19c1-46f6-940c-48d493ce3880\") " pod="openstack/openstackclient" Oct 03 13:51:52 crc kubenswrapper[4861]: I1003 13:51:52.377580 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/08030cdc-19c1-46f6-940c-48d493ce3880-openstack-config-secret\") pod \"openstackclient\" (UID: \"08030cdc-19c1-46f6-940c-48d493ce3880\") " pod="openstack/openstackclient" Oct 03 13:51:52 crc kubenswrapper[4861]: I1003 13:51:52.377659 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xb42s\" (UniqueName: \"kubernetes.io/projected/08030cdc-19c1-46f6-940c-48d493ce3880-kube-api-access-xb42s\") pod \"openstackclient\" (UID: \"08030cdc-19c1-46f6-940c-48d493ce3880\") " pod="openstack/openstackclient" Oct 03 13:51:52 crc kubenswrapper[4861]: I1003 13:51:52.378185 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/08030cdc-19c1-46f6-940c-48d493ce3880-openstack-config\") pod \"openstackclient\" (UID: \"08030cdc-19c1-46f6-940c-48d493ce3880\") " pod="openstack/openstackclient" Oct 03 13:51:52 crc kubenswrapper[4861]: I1003 13:51:52.385043 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/08030cdc-19c1-46f6-940c-48d493ce3880-combined-ca-bundle\") pod \"openstackclient\" (UID: \"08030cdc-19c1-46f6-940c-48d493ce3880\") " pod="openstack/openstackclient" Oct 03 13:51:52 crc kubenswrapper[4861]: I1003 13:51:52.395118 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/08030cdc-19c1-46f6-940c-48d493ce3880-openstack-config-secret\") pod \"openstackclient\" (UID: \"08030cdc-19c1-46f6-940c-48d493ce3880\") " pod="openstack/openstackclient" Oct 03 13:51:52 crc kubenswrapper[4861]: I1003 13:51:52.399551 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xb42s\" (UniqueName: \"kubernetes.io/projected/08030cdc-19c1-46f6-940c-48d493ce3880-kube-api-access-xb42s\") pod \"openstackclient\" (UID: \"08030cdc-19c1-46f6-940c-48d493ce3880\") " pod="openstack/openstackclient" Oct 03 13:51:52 crc kubenswrapper[4861]: I1003 13:51:52.526298 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Oct 03 13:51:52 crc kubenswrapper[4861]: I1003 13:51:52.701742 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9ab65d2f-a6e3-477c-81cf-d721fe6eaa9d" path="/var/lib/kubelet/pods/9ab65d2f-a6e3-477c-81cf-d721fe6eaa9d/volumes" Oct 03 13:51:53 crc kubenswrapper[4861]: I1003 13:51:53.070418 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Oct 03 13:51:53 crc kubenswrapper[4861]: W1003 13:51:53.076063 4861 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod08030cdc_19c1_46f6_940c_48d493ce3880.slice/crio-c9595e88fda3d35e505e5bde692260137796b3fcb539b97d3e01d59a9806dca8 WatchSource:0}: Error finding container c9595e88fda3d35e505e5bde692260137796b3fcb539b97d3e01d59a9806dca8: Status 404 returned error can't find the container with id c9595e88fda3d35e505e5bde692260137796b3fcb539b97d3e01d59a9806dca8 Oct 03 13:51:53 crc kubenswrapper[4861]: I1003 13:51:53.753942 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"08030cdc-19c1-46f6-940c-48d493ce3880","Type":"ContainerStarted","Data":"c9595e88fda3d35e505e5bde692260137796b3fcb539b97d3e01d59a9806dca8"} Oct 03 13:51:55 crc kubenswrapper[4861]: I1003 13:51:55.396511 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Oct 03 13:51:57 crc kubenswrapper[4861]: I1003 13:51:57.354091 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-proxy-5df8ff69f5-s22b2"] Oct 03 13:51:57 crc kubenswrapper[4861]: I1003 13:51:57.372555 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-5df8ff69f5-s22b2"] Oct 03 13:51:57 crc kubenswrapper[4861]: I1003 13:51:57.372894 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-proxy-5df8ff69f5-s22b2" Oct 03 13:51:57 crc kubenswrapper[4861]: I1003 13:51:57.385628 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-public-svc" Oct 03 13:51:57 crc kubenswrapper[4861]: I1003 13:51:57.385870 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Oct 03 13:51:57 crc kubenswrapper[4861]: I1003 13:51:57.386048 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-internal-svc" Oct 03 13:51:57 crc kubenswrapper[4861]: I1003 13:51:57.499161 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/71387314-2734-4ddf-ba27-c27c5cc05b0b-public-tls-certs\") pod \"swift-proxy-5df8ff69f5-s22b2\" (UID: \"71387314-2734-4ddf-ba27-c27c5cc05b0b\") " pod="openstack/swift-proxy-5df8ff69f5-s22b2" Oct 03 13:51:57 crc kubenswrapper[4861]: I1003 13:51:57.499622 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/71387314-2734-4ddf-ba27-c27c5cc05b0b-etc-swift\") pod \"swift-proxy-5df8ff69f5-s22b2\" (UID: \"71387314-2734-4ddf-ba27-c27c5cc05b0b\") " pod="openstack/swift-proxy-5df8ff69f5-s22b2" Oct 03 13:51:57 crc kubenswrapper[4861]: I1003 13:51:57.499769 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/71387314-2734-4ddf-ba27-c27c5cc05b0b-internal-tls-certs\") pod \"swift-proxy-5df8ff69f5-s22b2\" (UID: \"71387314-2734-4ddf-ba27-c27c5cc05b0b\") " pod="openstack/swift-proxy-5df8ff69f5-s22b2" Oct 03 13:51:57 crc kubenswrapper[4861]: I1003 13:51:57.499900 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/71387314-2734-4ddf-ba27-c27c5cc05b0b-combined-ca-bundle\") pod \"swift-proxy-5df8ff69f5-s22b2\" (UID: \"71387314-2734-4ddf-ba27-c27c5cc05b0b\") " pod="openstack/swift-proxy-5df8ff69f5-s22b2" Oct 03 13:51:57 crc kubenswrapper[4861]: I1003 13:51:57.500022 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/71387314-2734-4ddf-ba27-c27c5cc05b0b-log-httpd\") pod \"swift-proxy-5df8ff69f5-s22b2\" (UID: \"71387314-2734-4ddf-ba27-c27c5cc05b0b\") " pod="openstack/swift-proxy-5df8ff69f5-s22b2" Oct 03 13:51:57 crc kubenswrapper[4861]: I1003 13:51:57.500150 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/71387314-2734-4ddf-ba27-c27c5cc05b0b-config-data\") pod \"swift-proxy-5df8ff69f5-s22b2\" (UID: \"71387314-2734-4ddf-ba27-c27c5cc05b0b\") " pod="openstack/swift-proxy-5df8ff69f5-s22b2" Oct 03 13:51:57 crc kubenswrapper[4861]: I1003 13:51:57.500330 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k4chm\" (UniqueName: \"kubernetes.io/projected/71387314-2734-4ddf-ba27-c27c5cc05b0b-kube-api-access-k4chm\") pod \"swift-proxy-5df8ff69f5-s22b2\" (UID: \"71387314-2734-4ddf-ba27-c27c5cc05b0b\") " pod="openstack/swift-proxy-5df8ff69f5-s22b2" Oct 03 13:51:57 crc kubenswrapper[4861]: I1003 13:51:57.500606 4861 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/71387314-2734-4ddf-ba27-c27c5cc05b0b-run-httpd\") pod \"swift-proxy-5df8ff69f5-s22b2\" (UID: \"71387314-2734-4ddf-ba27-c27c5cc05b0b\") " pod="openstack/swift-proxy-5df8ff69f5-s22b2" Oct 03 13:51:57 crc kubenswrapper[4861]: I1003 13:51:57.602423 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/71387314-2734-4ddf-ba27-c27c5cc05b0b-public-tls-certs\") pod \"swift-proxy-5df8ff69f5-s22b2\" (UID: \"71387314-2734-4ddf-ba27-c27c5cc05b0b\") " pod="openstack/swift-proxy-5df8ff69f5-s22b2" Oct 03 13:51:57 crc kubenswrapper[4861]: I1003 13:51:57.602790 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/71387314-2734-4ddf-ba27-c27c5cc05b0b-etc-swift\") pod \"swift-proxy-5df8ff69f5-s22b2\" (UID: \"71387314-2734-4ddf-ba27-c27c5cc05b0b\") " pod="openstack/swift-proxy-5df8ff69f5-s22b2" Oct 03 13:51:57 crc kubenswrapper[4861]: I1003 13:51:57.602930 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/71387314-2734-4ddf-ba27-c27c5cc05b0b-internal-tls-certs\") pod \"swift-proxy-5df8ff69f5-s22b2\" (UID: \"71387314-2734-4ddf-ba27-c27c5cc05b0b\") " pod="openstack/swift-proxy-5df8ff69f5-s22b2" Oct 03 13:51:57 crc kubenswrapper[4861]: I1003 13:51:57.603034 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/71387314-2734-4ddf-ba27-c27c5cc05b0b-combined-ca-bundle\") pod \"swift-proxy-5df8ff69f5-s22b2\" (UID: \"71387314-2734-4ddf-ba27-c27c5cc05b0b\") " pod="openstack/swift-proxy-5df8ff69f5-s22b2" Oct 03 13:51:57 crc kubenswrapper[4861]: I1003 13:51:57.603155 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/71387314-2734-4ddf-ba27-c27c5cc05b0b-log-httpd\") pod \"swift-proxy-5df8ff69f5-s22b2\" (UID: \"71387314-2734-4ddf-ba27-c27c5cc05b0b\") " pod="openstack/swift-proxy-5df8ff69f5-s22b2" Oct 03 13:51:57 crc kubenswrapper[4861]: I1003 13:51:57.603262 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/71387314-2734-4ddf-ba27-c27c5cc05b0b-config-data\") pod \"swift-proxy-5df8ff69f5-s22b2\" (UID: \"71387314-2734-4ddf-ba27-c27c5cc05b0b\") " pod="openstack/swift-proxy-5df8ff69f5-s22b2" Oct 03 13:51:57 crc kubenswrapper[4861]: I1003 13:51:57.603371 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k4chm\" (UniqueName: \"kubernetes.io/projected/71387314-2734-4ddf-ba27-c27c5cc05b0b-kube-api-access-k4chm\") pod \"swift-proxy-5df8ff69f5-s22b2\" (UID: \"71387314-2734-4ddf-ba27-c27c5cc05b0b\") " pod="openstack/swift-proxy-5df8ff69f5-s22b2" Oct 03 13:51:57 crc kubenswrapper[4861]: I1003 13:51:57.603564 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/71387314-2734-4ddf-ba27-c27c5cc05b0b-run-httpd\") pod \"swift-proxy-5df8ff69f5-s22b2\" (UID: \"71387314-2734-4ddf-ba27-c27c5cc05b0b\") " pod="openstack/swift-proxy-5df8ff69f5-s22b2" Oct 03 13:51:57 crc kubenswrapper[4861]: I1003 13:51:57.604154 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" 
(UniqueName: \"kubernetes.io/empty-dir/71387314-2734-4ddf-ba27-c27c5cc05b0b-run-httpd\") pod \"swift-proxy-5df8ff69f5-s22b2\" (UID: \"71387314-2734-4ddf-ba27-c27c5cc05b0b\") " pod="openstack/swift-proxy-5df8ff69f5-s22b2" Oct 03 13:51:57 crc kubenswrapper[4861]: I1003 13:51:57.613021 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/71387314-2734-4ddf-ba27-c27c5cc05b0b-public-tls-certs\") pod \"swift-proxy-5df8ff69f5-s22b2\" (UID: \"71387314-2734-4ddf-ba27-c27c5cc05b0b\") " pod="openstack/swift-proxy-5df8ff69f5-s22b2" Oct 03 13:51:57 crc kubenswrapper[4861]: I1003 13:51:57.613475 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/71387314-2734-4ddf-ba27-c27c5cc05b0b-etc-swift\") pod \"swift-proxy-5df8ff69f5-s22b2\" (UID: \"71387314-2734-4ddf-ba27-c27c5cc05b0b\") " pod="openstack/swift-proxy-5df8ff69f5-s22b2" Oct 03 13:51:57 crc kubenswrapper[4861]: I1003 13:51:57.615586 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/71387314-2734-4ddf-ba27-c27c5cc05b0b-log-httpd\") pod \"swift-proxy-5df8ff69f5-s22b2\" (UID: \"71387314-2734-4ddf-ba27-c27c5cc05b0b\") " pod="openstack/swift-proxy-5df8ff69f5-s22b2" Oct 03 13:51:57 crc kubenswrapper[4861]: I1003 13:51:57.616913 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/71387314-2734-4ddf-ba27-c27c5cc05b0b-config-data\") pod \"swift-proxy-5df8ff69f5-s22b2\" (UID: \"71387314-2734-4ddf-ba27-c27c5cc05b0b\") " pod="openstack/swift-proxy-5df8ff69f5-s22b2" Oct 03 13:51:57 crc kubenswrapper[4861]: I1003 13:51:57.625637 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/71387314-2734-4ddf-ba27-c27c5cc05b0b-internal-tls-certs\") pod \"swift-proxy-5df8ff69f5-s22b2\" (UID: \"71387314-2734-4ddf-ba27-c27c5cc05b0b\") " pod="openstack/swift-proxy-5df8ff69f5-s22b2" Oct 03 13:51:57 crc kubenswrapper[4861]: I1003 13:51:57.627936 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/71387314-2734-4ddf-ba27-c27c5cc05b0b-combined-ca-bundle\") pod \"swift-proxy-5df8ff69f5-s22b2\" (UID: \"71387314-2734-4ddf-ba27-c27c5cc05b0b\") " pod="openstack/swift-proxy-5df8ff69f5-s22b2" Oct 03 13:51:57 crc kubenswrapper[4861]: I1003 13:51:57.643101 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k4chm\" (UniqueName: \"kubernetes.io/projected/71387314-2734-4ddf-ba27-c27c5cc05b0b-kube-api-access-k4chm\") pod \"swift-proxy-5df8ff69f5-s22b2\" (UID: \"71387314-2734-4ddf-ba27-c27c5cc05b0b\") " pod="openstack/swift-proxy-5df8ff69f5-s22b2" Oct 03 13:51:57 crc kubenswrapper[4861]: I1003 13:51:57.702957 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-proxy-5df8ff69f5-s22b2" Oct 03 13:51:58 crc kubenswrapper[4861]: I1003 13:51:58.344829 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-5df8ff69f5-s22b2"] Oct 03 13:51:58 crc kubenswrapper[4861]: I1003 13:51:58.595477 4861 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-84cdb7b9dd-jhc2h" podUID="c589e11a-4953-46ec-aeff-a83f6557421f" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.143:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.143:8443: connect: connection refused" Oct 03 13:51:58 crc kubenswrapper[4861]: I1003 13:51:58.723424 4861 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-6c8cb9d9fb-bt6ls" podUID="81ec621b-cc30-4ab2-ae0e-bdd71629009f" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.144:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.144:8443: connect: connection refused" Oct 03 13:51:58 crc kubenswrapper[4861]: I1003 13:51:58.881444 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-5df8ff69f5-s22b2" event={"ID":"71387314-2734-4ddf-ba27-c27c5cc05b0b","Type":"ContainerStarted","Data":"efec310ece656e4217d777fc443b600a78b8b2d1a049454e04f9b1383395a578"} Oct 03 13:51:58 crc kubenswrapper[4861]: I1003 13:51:58.881497 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-5df8ff69f5-s22b2" event={"ID":"71387314-2734-4ddf-ba27-c27c5cc05b0b","Type":"ContainerStarted","Data":"4963a420300c5052556a3e618e657085d7a4f6412bfd79e985359a5f3147fef4"} Oct 03 13:51:59 crc kubenswrapper[4861]: I1003 13:51:59.899385 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-5df8ff69f5-s22b2" event={"ID":"71387314-2734-4ddf-ba27-c27c5cc05b0b","Type":"ContainerStarted","Data":"e7705f8372583d2c358727c5a96dec5b80189ce8e4ea0bac95eadb2fc83c7117"} Oct 03 13:51:59 crc kubenswrapper[4861]: I1003 13:51:59.900089 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-5df8ff69f5-s22b2" Oct 03 13:51:59 crc kubenswrapper[4861]: I1003 13:51:59.900120 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-5df8ff69f5-s22b2" Oct 03 13:51:59 crc kubenswrapper[4861]: I1003 13:51:59.941599 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-proxy-5df8ff69f5-s22b2" podStartSLOduration=2.941577979 podStartE2EDuration="2.941577979s" podCreationTimestamp="2025-10-03 13:51:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:51:59.924051534 +0000 UTC m=+1233.922036581" watchObservedRunningTime="2025-10-03 13:51:59.941577979 +0000 UTC m=+1233.939563026" Oct 03 13:52:00 crc kubenswrapper[4861]: I1003 13:52:00.915687 4861 generic.go:334] "Generic (PLEG): container finished" podID="3413d18c-4d35-4962-bc3f-b6750860b13d" containerID="3d0b6ce35a3cfc12628c6c8d1c1b0935696ab629cd75f33401901447c089dc5a" exitCode=137 Oct 03 13:52:00 crc kubenswrapper[4861]: I1003 13:52:00.915854 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3413d18c-4d35-4962-bc3f-b6750860b13d","Type":"ContainerDied","Data":"3d0b6ce35a3cfc12628c6c8d1c1b0935696ab629cd75f33401901447c089dc5a"} Oct 03 13:52:03 crc kubenswrapper[4861]: I1003 13:52:03.912006 4861 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack/nova-api-db-create-vwnhz"] Oct 03 13:52:03 crc kubenswrapper[4861]: I1003 13:52:03.913448 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-vwnhz" Oct 03 13:52:03 crc kubenswrapper[4861]: I1003 13:52:03.926744 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-vwnhz"] Oct 03 13:52:04 crc kubenswrapper[4861]: I1003 13:52:04.010251 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-nrf7f"] Oct 03 13:52:04 crc kubenswrapper[4861]: I1003 13:52:04.011599 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-nrf7f" Oct 03 13:52:04 crc kubenswrapper[4861]: I1003 13:52:04.022615 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-nrf7f"] Oct 03 13:52:04 crc kubenswrapper[4861]: I1003 13:52:04.053543 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7f8kp\" (UniqueName: \"kubernetes.io/projected/741f5e11-d2d0-405c-9826-929d9b2b072b-kube-api-access-7f8kp\") pod \"nova-api-db-create-vwnhz\" (UID: \"741f5e11-d2d0-405c-9826-929d9b2b072b\") " pod="openstack/nova-api-db-create-vwnhz" Oct 03 13:52:04 crc kubenswrapper[4861]: I1003 13:52:04.155258 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t74cx\" (UniqueName: \"kubernetes.io/projected/de3aa019-3497-44f8-a749-0f04313edd2a-kube-api-access-t74cx\") pod \"nova-cell0-db-create-nrf7f\" (UID: \"de3aa019-3497-44f8-a749-0f04313edd2a\") " pod="openstack/nova-cell0-db-create-nrf7f" Oct 03 13:52:04 crc kubenswrapper[4861]: I1003 13:52:04.155462 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7f8kp\" (UniqueName: \"kubernetes.io/projected/741f5e11-d2d0-405c-9826-929d9b2b072b-kube-api-access-7f8kp\") pod \"nova-api-db-create-vwnhz\" (UID: \"741f5e11-d2d0-405c-9826-929d9b2b072b\") " pod="openstack/nova-api-db-create-vwnhz" Oct 03 13:52:04 crc kubenswrapper[4861]: I1003 13:52:04.179803 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7f8kp\" (UniqueName: \"kubernetes.io/projected/741f5e11-d2d0-405c-9826-929d9b2b072b-kube-api-access-7f8kp\") pod \"nova-api-db-create-vwnhz\" (UID: \"741f5e11-d2d0-405c-9826-929d9b2b072b\") " pod="openstack/nova-api-db-create-vwnhz" Oct 03 13:52:04 crc kubenswrapper[4861]: I1003 13:52:04.227467 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-mvb4f"] Oct 03 13:52:04 crc kubenswrapper[4861]: I1003 13:52:04.228698 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-mvb4f" Oct 03 13:52:04 crc kubenswrapper[4861]: I1003 13:52:04.233607 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-vwnhz" Oct 03 13:52:04 crc kubenswrapper[4861]: I1003 13:52:04.252789 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-mvb4f"] Oct 03 13:52:04 crc kubenswrapper[4861]: I1003 13:52:04.256840 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t74cx\" (UniqueName: \"kubernetes.io/projected/de3aa019-3497-44f8-a749-0f04313edd2a-kube-api-access-t74cx\") pod \"nova-cell0-db-create-nrf7f\" (UID: \"de3aa019-3497-44f8-a749-0f04313edd2a\") " pod="openstack/nova-cell0-db-create-nrf7f" Oct 03 13:52:04 crc kubenswrapper[4861]: I1003 13:52:04.277442 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t74cx\" (UniqueName: \"kubernetes.io/projected/de3aa019-3497-44f8-a749-0f04313edd2a-kube-api-access-t74cx\") pod \"nova-cell0-db-create-nrf7f\" (UID: \"de3aa019-3497-44f8-a749-0f04313edd2a\") " pod="openstack/nova-cell0-db-create-nrf7f" Oct 03 13:52:04 crc kubenswrapper[4861]: I1003 13:52:04.330660 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-nrf7f" Oct 03 13:52:04 crc kubenswrapper[4861]: I1003 13:52:04.358298 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wpp9n\" (UniqueName: \"kubernetes.io/projected/b99e855f-4dbf-498a-b942-e22c86d392d2-kube-api-access-wpp9n\") pod \"nova-cell1-db-create-mvb4f\" (UID: \"b99e855f-4dbf-498a-b942-e22c86d392d2\") " pod="openstack/nova-cell1-db-create-mvb4f" Oct 03 13:52:04 crc kubenswrapper[4861]: I1003 13:52:04.460002 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wpp9n\" (UniqueName: \"kubernetes.io/projected/b99e855f-4dbf-498a-b942-e22c86d392d2-kube-api-access-wpp9n\") pod \"nova-cell1-db-create-mvb4f\" (UID: \"b99e855f-4dbf-498a-b942-e22c86d392d2\") " pod="openstack/nova-cell1-db-create-mvb4f" Oct 03 13:52:04 crc kubenswrapper[4861]: I1003 13:52:04.478616 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wpp9n\" (UniqueName: \"kubernetes.io/projected/b99e855f-4dbf-498a-b942-e22c86d392d2-kube-api-access-wpp9n\") pod \"nova-cell1-db-create-mvb4f\" (UID: \"b99e855f-4dbf-498a-b942-e22c86d392d2\") " pod="openstack/nova-cell1-db-create-mvb4f" Oct 03 13:52:04 crc kubenswrapper[4861]: I1003 13:52:04.554096 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-mvb4f" Oct 03 13:52:05 crc kubenswrapper[4861]: E1003 13:52:05.566715 4861 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8bbb0cd4_37cc_4039_a13f_55dbeb56c336.slice/crio-conmon-46749097649a38a960d20044f79fc12eb2206cae0c185841680247191b6ece6c.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8bbb0cd4_37cc_4039_a13f_55dbeb56c336.slice/crio-46749097649a38a960d20044f79fc12eb2206cae0c185841680247191b6ece6c.scope\": RecentStats: unable to find data in memory cache]" Oct 03 13:52:06 crc kubenswrapper[4861]: I1003 13:52:06.021393 4861 generic.go:334] "Generic (PLEG): container finished" podID="8bbb0cd4-37cc-4039-a13f-55dbeb56c336" containerID="46749097649a38a960d20044f79fc12eb2206cae0c185841680247191b6ece6c" exitCode=137 Oct 03 13:52:06 crc kubenswrapper[4861]: I1003 13:52:06.021786 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"8bbb0cd4-37cc-4039-a13f-55dbeb56c336","Type":"ContainerDied","Data":"46749097649a38a960d20044f79fc12eb2206cae0c185841680247191b6ece6c"} Oct 03 13:52:06 crc kubenswrapper[4861]: I1003 13:52:06.606648 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 03 13:52:06 crc kubenswrapper[4861]: I1003 13:52:06.606877 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="e179d8c8-2f07-4b5c-9db5-c26877356f86" containerName="glance-log" containerID="cri-o://a55d2cf51b6106dad51dd75123df347fa3f197c41aedc1bac6fa6e18ca480564" gracePeriod=30 Oct 03 13:52:06 crc kubenswrapper[4861]: I1003 13:52:06.606945 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="e179d8c8-2f07-4b5c-9db5-c26877356f86" containerName="glance-httpd" containerID="cri-o://8b5224a7e6863ed712c6aa8d296714089aaa6d9c8931092c695fc9b013d993dc" gracePeriod=30 Oct 03 13:52:07 crc kubenswrapper[4861]: I1003 13:52:07.053406 4861 generic.go:334] "Generic (PLEG): container finished" podID="e179d8c8-2f07-4b5c-9db5-c26877356f86" containerID="a55d2cf51b6106dad51dd75123df347fa3f197c41aedc1bac6fa6e18ca480564" exitCode=143 Oct 03 13:52:07 crc kubenswrapper[4861]: I1003 13:52:07.053720 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"e179d8c8-2f07-4b5c-9db5-c26877356f86","Type":"ContainerDied","Data":"a55d2cf51b6106dad51dd75123df347fa3f197c41aedc1bac6fa6e18ca480564"} Oct 03 13:52:07 crc kubenswrapper[4861]: I1003 13:52:07.574524 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 03 13:52:07 crc kubenswrapper[4861]: I1003 13:52:07.574794 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="b235907f-2bbf-4402-ac15-7c38c44a7461" containerName="glance-log" containerID="cri-o://31943b45a956b02a6ab0773b2a3fe2171422f6989c6c5f028fa28e27430e1e0f" gracePeriod=30 Oct 03 13:52:07 crc kubenswrapper[4861]: I1003 13:52:07.574940 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="b235907f-2bbf-4402-ac15-7c38c44a7461" containerName="glance-httpd" 
containerID="cri-o://344501de3bd1fcb02bbd1237b09f8d94d45792e9b54462601567e5eb4cdab6dd" gracePeriod=30 Oct 03 13:52:07 crc kubenswrapper[4861]: I1003 13:52:07.710608 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-5df8ff69f5-s22b2" Oct 03 13:52:07 crc kubenswrapper[4861]: I1003 13:52:07.718930 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-5df8ff69f5-s22b2" Oct 03 13:52:08 crc kubenswrapper[4861]: I1003 13:52:08.068194 4861 generic.go:334] "Generic (PLEG): container finished" podID="b235907f-2bbf-4402-ac15-7c38c44a7461" containerID="31943b45a956b02a6ab0773b2a3fe2171422f6989c6c5f028fa28e27430e1e0f" exitCode=143 Oct 03 13:52:08 crc kubenswrapper[4861]: I1003 13:52:08.068983 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"b235907f-2bbf-4402-ac15-7c38c44a7461","Type":"ContainerDied","Data":"31943b45a956b02a6ab0773b2a3fe2171422f6989c6c5f028fa28e27430e1e0f"} Oct 03 13:52:08 crc kubenswrapper[4861]: I1003 13:52:08.595014 4861 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-84cdb7b9dd-jhc2h" podUID="c589e11a-4953-46ec-aeff-a83f6557421f" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.143:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.143:8443: connect: connection refused" Oct 03 13:52:08 crc kubenswrapper[4861]: I1003 13:52:08.720302 4861 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-6c8cb9d9fb-bt6ls" podUID="81ec621b-cc30-4ab2-ae0e-bdd71629009f" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.144:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.144:8443: connect: connection refused" Oct 03 13:52:09 crc kubenswrapper[4861]: I1003 13:52:09.767657 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-vwnhz"] Oct 03 13:52:09 crc kubenswrapper[4861]: I1003 13:52:09.791530 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Oct 03 13:52:09 crc kubenswrapper[4861]: I1003 13:52:09.901082 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 03 13:52:09 crc kubenswrapper[4861]: I1003 13:52:09.918897 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8bbb0cd4-37cc-4039-a13f-55dbeb56c336-config-data\") pod \"8bbb0cd4-37cc-4039-a13f-55dbeb56c336\" (UID: \"8bbb0cd4-37cc-4039-a13f-55dbeb56c336\") " Oct 03 13:52:09 crc kubenswrapper[4861]: I1003 13:52:09.918991 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8bbb0cd4-37cc-4039-a13f-55dbeb56c336-config-data-custom\") pod \"8bbb0cd4-37cc-4039-a13f-55dbeb56c336\" (UID: \"8bbb0cd4-37cc-4039-a13f-55dbeb56c336\") " Oct 03 13:52:09 crc kubenswrapper[4861]: I1003 13:52:09.919033 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8bbb0cd4-37cc-4039-a13f-55dbeb56c336-scripts\") pod \"8bbb0cd4-37cc-4039-a13f-55dbeb56c336\" (UID: \"8bbb0cd4-37cc-4039-a13f-55dbeb56c336\") " Oct 03 13:52:09 crc kubenswrapper[4861]: I1003 13:52:09.919110 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8bbb0cd4-37cc-4039-a13f-55dbeb56c336-logs\") pod \"8bbb0cd4-37cc-4039-a13f-55dbeb56c336\" (UID: \"8bbb0cd4-37cc-4039-a13f-55dbeb56c336\") " Oct 03 13:52:09 crc kubenswrapper[4861]: I1003 13:52:09.919164 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8bbb0cd4-37cc-4039-a13f-55dbeb56c336-etc-machine-id\") pod \"8bbb0cd4-37cc-4039-a13f-55dbeb56c336\" (UID: \"8bbb0cd4-37cc-4039-a13f-55dbeb56c336\") " Oct 03 13:52:09 crc kubenswrapper[4861]: I1003 13:52:09.919275 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8bbb0cd4-37cc-4039-a13f-55dbeb56c336-combined-ca-bundle\") pod \"8bbb0cd4-37cc-4039-a13f-55dbeb56c336\" (UID: \"8bbb0cd4-37cc-4039-a13f-55dbeb56c336\") " Oct 03 13:52:09 crc kubenswrapper[4861]: I1003 13:52:09.919300 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rs58v\" (UniqueName: \"kubernetes.io/projected/8bbb0cd4-37cc-4039-a13f-55dbeb56c336-kube-api-access-rs58v\") pod \"8bbb0cd4-37cc-4039-a13f-55dbeb56c336\" (UID: \"8bbb0cd4-37cc-4039-a13f-55dbeb56c336\") " Oct 03 13:52:09 crc kubenswrapper[4861]: I1003 13:52:09.937906 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8bbb0cd4-37cc-4039-a13f-55dbeb56c336-kube-api-access-rs58v" (OuterVolumeSpecName: "kube-api-access-rs58v") pod "8bbb0cd4-37cc-4039-a13f-55dbeb56c336" (UID: "8bbb0cd4-37cc-4039-a13f-55dbeb56c336"). InnerVolumeSpecName "kube-api-access-rs58v". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:52:09 crc kubenswrapper[4861]: I1003 13:52:09.938334 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8bbb0cd4-37cc-4039-a13f-55dbeb56c336-logs" (OuterVolumeSpecName: "logs") pod "8bbb0cd4-37cc-4039-a13f-55dbeb56c336" (UID: "8bbb0cd4-37cc-4039-a13f-55dbeb56c336"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:52:09 crc kubenswrapper[4861]: I1003 13:52:09.938370 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8bbb0cd4-37cc-4039-a13f-55dbeb56c336-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "8bbb0cd4-37cc-4039-a13f-55dbeb56c336" (UID: "8bbb0cd4-37cc-4039-a13f-55dbeb56c336"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 03 13:52:09 crc kubenswrapper[4861]: I1003 13:52:09.961821 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8bbb0cd4-37cc-4039-a13f-55dbeb56c336-scripts" (OuterVolumeSpecName: "scripts") pod "8bbb0cd4-37cc-4039-a13f-55dbeb56c336" (UID: "8bbb0cd4-37cc-4039-a13f-55dbeb56c336"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.001909 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8bbb0cd4-37cc-4039-a13f-55dbeb56c336-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "8bbb0cd4-37cc-4039-a13f-55dbeb56c336" (UID: "8bbb0cd4-37cc-4039-a13f-55dbeb56c336"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.008860 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8bbb0cd4-37cc-4039-a13f-55dbeb56c336-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8bbb0cd4-37cc-4039-a13f-55dbeb56c336" (UID: "8bbb0cd4-37cc-4039-a13f-55dbeb56c336"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.021593 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3413d18c-4d35-4962-bc3f-b6750860b13d-scripts\") pod \"3413d18c-4d35-4962-bc3f-b6750860b13d\" (UID: \"3413d18c-4d35-4962-bc3f-b6750860b13d\") " Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.021707 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3413d18c-4d35-4962-bc3f-b6750860b13d-combined-ca-bundle\") pod \"3413d18c-4d35-4962-bc3f-b6750860b13d\" (UID: \"3413d18c-4d35-4962-bc3f-b6750860b13d\") " Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.021799 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3413d18c-4d35-4962-bc3f-b6750860b13d-sg-core-conf-yaml\") pod \"3413d18c-4d35-4962-bc3f-b6750860b13d\" (UID: \"3413d18c-4d35-4962-bc3f-b6750860b13d\") " Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.021862 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3413d18c-4d35-4962-bc3f-b6750860b13d-run-httpd\") pod \"3413d18c-4d35-4962-bc3f-b6750860b13d\" (UID: \"3413d18c-4d35-4962-bc3f-b6750860b13d\") " Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.021989 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3413d18c-4d35-4962-bc3f-b6750860b13d-config-data\") pod \"3413d18c-4d35-4962-bc3f-b6750860b13d\" (UID: \"3413d18c-4d35-4962-bc3f-b6750860b13d\") " Oct 
03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.022037 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3413d18c-4d35-4962-bc3f-b6750860b13d-log-httpd\") pod \"3413d18c-4d35-4962-bc3f-b6750860b13d\" (UID: \"3413d18c-4d35-4962-bc3f-b6750860b13d\") " Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.022087 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2psg2\" (UniqueName: \"kubernetes.io/projected/3413d18c-4d35-4962-bc3f-b6750860b13d-kube-api-access-2psg2\") pod \"3413d18c-4d35-4962-bc3f-b6750860b13d\" (UID: \"3413d18c-4d35-4962-bc3f-b6750860b13d\") " Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.022609 4861 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8bbb0cd4-37cc-4039-a13f-55dbeb56c336-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.022661 4861 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8bbb0cd4-37cc-4039-a13f-55dbeb56c336-logs\") on node \"crc\" DevicePath \"\"" Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.022673 4861 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8bbb0cd4-37cc-4039-a13f-55dbeb56c336-etc-machine-id\") on node \"crc\" DevicePath \"\"" Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.022684 4861 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8bbb0cd4-37cc-4039-a13f-55dbeb56c336-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.022693 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rs58v\" (UniqueName: \"kubernetes.io/projected/8bbb0cd4-37cc-4039-a13f-55dbeb56c336-kube-api-access-rs58v\") on node \"crc\" DevicePath \"\"" Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.022700 4861 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8bbb0cd4-37cc-4039-a13f-55dbeb56c336-config-data-custom\") on node \"crc\" DevicePath \"\"" Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.025318 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3413d18c-4d35-4962-bc3f-b6750860b13d-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "3413d18c-4d35-4962-bc3f-b6750860b13d" (UID: "3413d18c-4d35-4962-bc3f-b6750860b13d"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.025619 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3413d18c-4d35-4962-bc3f-b6750860b13d-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "3413d18c-4d35-4962-bc3f-b6750860b13d" (UID: "3413d18c-4d35-4962-bc3f-b6750860b13d"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.033157 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3413d18c-4d35-4962-bc3f-b6750860b13d-kube-api-access-2psg2" (OuterVolumeSpecName: "kube-api-access-2psg2") pod "3413d18c-4d35-4962-bc3f-b6750860b13d" (UID: "3413d18c-4d35-4962-bc3f-b6750860b13d"). 
InnerVolumeSpecName "kube-api-access-2psg2". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.037160 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3413d18c-4d35-4962-bc3f-b6750860b13d-scripts" (OuterVolumeSpecName: "scripts") pod "3413d18c-4d35-4962-bc3f-b6750860b13d" (UID: "3413d18c-4d35-4962-bc3f-b6750860b13d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.125033 4861 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3413d18c-4d35-4962-bc3f-b6750860b13d-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.125053 4861 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3413d18c-4d35-4962-bc3f-b6750860b13d-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.125062 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2psg2\" (UniqueName: \"kubernetes.io/projected/3413d18c-4d35-4962-bc3f-b6750860b13d-kube-api-access-2psg2\") on node \"crc\" DevicePath \"\"" Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.125073 4861 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3413d18c-4d35-4962-bc3f-b6750860b13d-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.142514 4861 generic.go:334] "Generic (PLEG): container finished" podID="e179d8c8-2f07-4b5c-9db5-c26877356f86" containerID="8b5224a7e6863ed712c6aa8d296714089aaa6d9c8931092c695fc9b013d993dc" exitCode=0 Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.142572 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"e179d8c8-2f07-4b5c-9db5-c26877356f86","Type":"ContainerDied","Data":"8b5224a7e6863ed712c6aa8d296714089aaa6d9c8931092c695fc9b013d993dc"} Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.176484 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-nrf7f"] Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.187968 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3413d18c-4d35-4962-bc3f-b6750860b13d","Type":"ContainerDied","Data":"f68b032e7f1f7f171b639a9e37a9dcddb60cf162b4329e550892ea411c45e574"} Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.188036 4861 scope.go:117] "RemoveContainer" containerID="3d0b6ce35a3cfc12628c6c8d1c1b0935696ab629cd75f33401901447c089dc5a" Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.188196 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.189274 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-mvb4f"] Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.194444 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3413d18c-4d35-4962-bc3f-b6750860b13d-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "3413d18c-4d35-4962-bc3f-b6750860b13d" (UID: "3413d18c-4d35-4962-bc3f-b6750860b13d"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.199122 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8bbb0cd4-37cc-4039-a13f-55dbeb56c336-config-data" (OuterVolumeSpecName: "config-data") pod "8bbb0cd4-37cc-4039-a13f-55dbeb56c336" (UID: "8bbb0cd4-37cc-4039-a13f-55dbeb56c336"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.214050 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"08030cdc-19c1-46f6-940c-48d493ce3880","Type":"ContainerStarted","Data":"a698b39faba03cc2e4fc8664e4f5a8c57431a5a83b2ed7cd707b0a0ce39edd81"} Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.227860 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-vwnhz" event={"ID":"741f5e11-d2d0-405c-9826-929d9b2b072b","Type":"ContainerStarted","Data":"b0c554587940b28ee6a6edf3562d17c03bed67508391a458b4b7c05cef75d2d8"} Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.230410 4861 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8bbb0cd4-37cc-4039-a13f-55dbeb56c336-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.230432 4861 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3413d18c-4d35-4962-bc3f-b6750860b13d-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.230561 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"8bbb0cd4-37cc-4039-a13f-55dbeb56c336","Type":"ContainerDied","Data":"d7f40bd7eb297e0bf199c87b293b03b74c2d99ed18155b965f510c063f0cdb3e"} Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.230639 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.271832 4861 scope.go:117] "RemoveContainer" containerID="fa00cbad7504839003d631c483cce5f8ce9c82d27ece44c2bd50e9fa241a8233" Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.287922 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=2.194080298 podStartE2EDuration="18.287903381s" podCreationTimestamp="2025-10-03 13:51:52 +0000 UTC" firstStartedPulling="2025-10-03 13:51:53.077753375 +0000 UTC m=+1227.075738422" lastFinishedPulling="2025-10-03 13:52:09.171576458 +0000 UTC m=+1243.169561505" observedRunningTime="2025-10-03 13:52:10.239548195 +0000 UTC m=+1244.237533242" watchObservedRunningTime="2025-10-03 13:52:10.287903381 +0000 UTC m=+1244.285888428" Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.376697 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3413d18c-4d35-4962-bc3f-b6750860b13d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3413d18c-4d35-4962-bc3f-b6750860b13d" (UID: "3413d18c-4d35-4962-bc3f-b6750860b13d"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.387804 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.399513 4861 scope.go:117] "RemoveContainer" containerID="e050f64840966798f0046b4fbc8341a52b6af025718938a28c8d770b92dac0b8" Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.410928 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.435304 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Oct 03 13:52:10 crc kubenswrapper[4861]: E1003 13:52:10.435744 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3413d18c-4d35-4962-bc3f-b6750860b13d" containerName="sg-core" Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.435765 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="3413d18c-4d35-4962-bc3f-b6750860b13d" containerName="sg-core" Oct 03 13:52:10 crc kubenswrapper[4861]: E1003 13:52:10.435781 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3413d18c-4d35-4962-bc3f-b6750860b13d" containerName="ceilometer-central-agent" Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.435790 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="3413d18c-4d35-4962-bc3f-b6750860b13d" containerName="ceilometer-central-agent" Oct 03 13:52:10 crc kubenswrapper[4861]: E1003 13:52:10.435818 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8bbb0cd4-37cc-4039-a13f-55dbeb56c336" containerName="cinder-api" Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.435826 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="8bbb0cd4-37cc-4039-a13f-55dbeb56c336" containerName="cinder-api" Oct 03 13:52:10 crc kubenswrapper[4861]: E1003 13:52:10.435849 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3413d18c-4d35-4962-bc3f-b6750860b13d" containerName="ceilometer-notification-agent" Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.435855 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="3413d18c-4d35-4962-bc3f-b6750860b13d" containerName="ceilometer-notification-agent" Oct 03 13:52:10 crc kubenswrapper[4861]: E1003 13:52:10.435865 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8bbb0cd4-37cc-4039-a13f-55dbeb56c336" containerName="cinder-api-log" Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.435871 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="8bbb0cd4-37cc-4039-a13f-55dbeb56c336" containerName="cinder-api-log" Oct 03 13:52:10 crc kubenswrapper[4861]: E1003 13:52:10.435884 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3413d18c-4d35-4962-bc3f-b6750860b13d" containerName="proxy-httpd" Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.435892 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="3413d18c-4d35-4962-bc3f-b6750860b13d" containerName="proxy-httpd" Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.436058 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="3413d18c-4d35-4962-bc3f-b6750860b13d" containerName="ceilometer-notification-agent" Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.436081 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="8bbb0cd4-37cc-4039-a13f-55dbeb56c336" containerName="cinder-api" Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.436094 4861 
memory_manager.go:354] "RemoveStaleState removing state" podUID="3413d18c-4d35-4962-bc3f-b6750860b13d" containerName="ceilometer-central-agent" Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.436103 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="3413d18c-4d35-4962-bc3f-b6750860b13d" containerName="sg-core" Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.436114 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="3413d18c-4d35-4962-bc3f-b6750860b13d" containerName="proxy-httpd" Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.436125 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="8bbb0cd4-37cc-4039-a13f-55dbeb56c336" containerName="cinder-api-log" Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.437651 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.440053 4861 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3413d18c-4d35-4962-bc3f-b6750860b13d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.440395 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-internal-svc" Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.440510 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.440717 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-public-svc" Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.458160 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.508515 4861 scope.go:117] "RemoveContainer" containerID="5af7b441f7b61362145e5edc152e1d729bb66a3c297fbd1c3320b8cdd862a9f6" Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.525993 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3413d18c-4d35-4962-bc3f-b6750860b13d-config-data" (OuterVolumeSpecName: "config-data") pod "3413d18c-4d35-4962-bc3f-b6750860b13d" (UID: "3413d18c-4d35-4962-bc3f-b6750860b13d"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.542253 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rjv6f\" (UniqueName: \"kubernetes.io/projected/de8f27cb-a40f-4ab0-b709-4abad3ff72bb-kube-api-access-rjv6f\") pod \"cinder-api-0\" (UID: \"de8f27cb-a40f-4ab0-b709-4abad3ff72bb\") " pod="openstack/cinder-api-0" Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.542318 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/de8f27cb-a40f-4ab0-b709-4abad3ff72bb-config-data\") pod \"cinder-api-0\" (UID: \"de8f27cb-a40f-4ab0-b709-4abad3ff72bb\") " pod="openstack/cinder-api-0" Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.542414 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/de8f27cb-a40f-4ab0-b709-4abad3ff72bb-etc-machine-id\") pod \"cinder-api-0\" (UID: \"de8f27cb-a40f-4ab0-b709-4abad3ff72bb\") " pod="openstack/cinder-api-0" Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.542443 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/de8f27cb-a40f-4ab0-b709-4abad3ff72bb-logs\") pod \"cinder-api-0\" (UID: \"de8f27cb-a40f-4ab0-b709-4abad3ff72bb\") " pod="openstack/cinder-api-0" Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.542488 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/de8f27cb-a40f-4ab0-b709-4abad3ff72bb-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"de8f27cb-a40f-4ab0-b709-4abad3ff72bb\") " pod="openstack/cinder-api-0" Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.542512 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/de8f27cb-a40f-4ab0-b709-4abad3ff72bb-config-data-custom\") pod \"cinder-api-0\" (UID: \"de8f27cb-a40f-4ab0-b709-4abad3ff72bb\") " pod="openstack/cinder-api-0" Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.542541 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/de8f27cb-a40f-4ab0-b709-4abad3ff72bb-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"de8f27cb-a40f-4ab0-b709-4abad3ff72bb\") " pod="openstack/cinder-api-0" Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.542585 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/de8f27cb-a40f-4ab0-b709-4abad3ff72bb-scripts\") pod \"cinder-api-0\" (UID: \"de8f27cb-a40f-4ab0-b709-4abad3ff72bb\") " pod="openstack/cinder-api-0" Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.542634 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/de8f27cb-a40f-4ab0-b709-4abad3ff72bb-public-tls-certs\") pod \"cinder-api-0\" (UID: \"de8f27cb-a40f-4ab0-b709-4abad3ff72bb\") " pod="openstack/cinder-api-0" Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.542687 4861 reconciler_common.go:293] "Volume detached for volume 
\"config-data\" (UniqueName: \"kubernetes.io/secret/3413d18c-4d35-4962-bc3f-b6750860b13d-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.549539 4861 scope.go:117] "RemoveContainer" containerID="46749097649a38a960d20044f79fc12eb2206cae0c185841680247191b6ece6c" Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.606542 4861 scope.go:117] "RemoveContainer" containerID="2fcd19aff19f1d4e0d704be9a7d76d943ad588f19760e2576790dc8976267ff1" Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.653667 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/de8f27cb-a40f-4ab0-b709-4abad3ff72bb-config-data\") pod \"cinder-api-0\" (UID: \"de8f27cb-a40f-4ab0-b709-4abad3ff72bb\") " pod="openstack/cinder-api-0" Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.653798 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/de8f27cb-a40f-4ab0-b709-4abad3ff72bb-etc-machine-id\") pod \"cinder-api-0\" (UID: \"de8f27cb-a40f-4ab0-b709-4abad3ff72bb\") " pod="openstack/cinder-api-0" Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.653824 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/de8f27cb-a40f-4ab0-b709-4abad3ff72bb-logs\") pod \"cinder-api-0\" (UID: \"de8f27cb-a40f-4ab0-b709-4abad3ff72bb\") " pod="openstack/cinder-api-0" Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.653876 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/de8f27cb-a40f-4ab0-b709-4abad3ff72bb-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"de8f27cb-a40f-4ab0-b709-4abad3ff72bb\") " pod="openstack/cinder-api-0" Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.653898 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/de8f27cb-a40f-4ab0-b709-4abad3ff72bb-config-data-custom\") pod \"cinder-api-0\" (UID: \"de8f27cb-a40f-4ab0-b709-4abad3ff72bb\") " pod="openstack/cinder-api-0" Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.653917 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/de8f27cb-a40f-4ab0-b709-4abad3ff72bb-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"de8f27cb-a40f-4ab0-b709-4abad3ff72bb\") " pod="openstack/cinder-api-0" Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.653968 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/de8f27cb-a40f-4ab0-b709-4abad3ff72bb-scripts\") pod \"cinder-api-0\" (UID: \"de8f27cb-a40f-4ab0-b709-4abad3ff72bb\") " pod="openstack/cinder-api-0" Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.654004 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/de8f27cb-a40f-4ab0-b709-4abad3ff72bb-public-tls-certs\") pod \"cinder-api-0\" (UID: \"de8f27cb-a40f-4ab0-b709-4abad3ff72bb\") " pod="openstack/cinder-api-0" Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.654055 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rjv6f\" (UniqueName: 
\"kubernetes.io/projected/de8f27cb-a40f-4ab0-b709-4abad3ff72bb-kube-api-access-rjv6f\") pod \"cinder-api-0\" (UID: \"de8f27cb-a40f-4ab0-b709-4abad3ff72bb\") " pod="openstack/cinder-api-0" Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.661572 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/de8f27cb-a40f-4ab0-b709-4abad3ff72bb-config-data\") pod \"cinder-api-0\" (UID: \"de8f27cb-a40f-4ab0-b709-4abad3ff72bb\") " pod="openstack/cinder-api-0" Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.661648 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/de8f27cb-a40f-4ab0-b709-4abad3ff72bb-etc-machine-id\") pod \"cinder-api-0\" (UID: \"de8f27cb-a40f-4ab0-b709-4abad3ff72bb\") " pod="openstack/cinder-api-0" Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.662251 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/de8f27cb-a40f-4ab0-b709-4abad3ff72bb-config-data-custom\") pod \"cinder-api-0\" (UID: \"de8f27cb-a40f-4ab0-b709-4abad3ff72bb\") " pod="openstack/cinder-api-0" Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.666666 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/de8f27cb-a40f-4ab0-b709-4abad3ff72bb-logs\") pod \"cinder-api-0\" (UID: \"de8f27cb-a40f-4ab0-b709-4abad3ff72bb\") " pod="openstack/cinder-api-0" Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.671893 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/de8f27cb-a40f-4ab0-b709-4abad3ff72bb-scripts\") pod \"cinder-api-0\" (UID: \"de8f27cb-a40f-4ab0-b709-4abad3ff72bb\") " pod="openstack/cinder-api-0" Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.673483 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.676408 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rjv6f\" (UniqueName: \"kubernetes.io/projected/de8f27cb-a40f-4ab0-b709-4abad3ff72bb-kube-api-access-rjv6f\") pod \"cinder-api-0\" (UID: \"de8f27cb-a40f-4ab0-b709-4abad3ff72bb\") " pod="openstack/cinder-api-0" Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.677516 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/de8f27cb-a40f-4ab0-b709-4abad3ff72bb-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"de8f27cb-a40f-4ab0-b709-4abad3ff72bb\") " pod="openstack/cinder-api-0" Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.682341 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/de8f27cb-a40f-4ab0-b709-4abad3ff72bb-public-tls-certs\") pod \"cinder-api-0\" (UID: \"de8f27cb-a40f-4ab0-b709-4abad3ff72bb\") " pod="openstack/cinder-api-0" Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.691993 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/de8f27cb-a40f-4ab0-b709-4abad3ff72bb-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"de8f27cb-a40f-4ab0-b709-4abad3ff72bb\") " pod="openstack/cinder-api-0" Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.698276 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8bbb0cd4-37cc-4039-a13f-55dbeb56c336" path="/var/lib/kubelet/pods/8bbb0cd4-37cc-4039-a13f-55dbeb56c336/volumes" Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.756281 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e179d8c8-2f07-4b5c-9db5-c26877356f86-logs\") pod \"e179d8c8-2f07-4b5c-9db5-c26877356f86\" (UID: \"e179d8c8-2f07-4b5c-9db5-c26877356f86\") " Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.756352 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e179d8c8-2f07-4b5c-9db5-c26877356f86-public-tls-certs\") pod \"e179d8c8-2f07-4b5c-9db5-c26877356f86\" (UID: \"e179d8c8-2f07-4b5c-9db5-c26877356f86\") " Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.756389 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e179d8c8-2f07-4b5c-9db5-c26877356f86-config-data\") pod \"e179d8c8-2f07-4b5c-9db5-c26877356f86\" (UID: \"e179d8c8-2f07-4b5c-9db5-c26877356f86\") " Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.756424 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e179d8c8-2f07-4b5c-9db5-c26877356f86-combined-ca-bundle\") pod \"e179d8c8-2f07-4b5c-9db5-c26877356f86\" (UID: \"e179d8c8-2f07-4b5c-9db5-c26877356f86\") " Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.756454 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e179d8c8-2f07-4b5c-9db5-c26877356f86-httpd-run\") pod \"e179d8c8-2f07-4b5c-9db5-c26877356f86\" (UID: \"e179d8c8-2f07-4b5c-9db5-c26877356f86\") " Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.756559 4861 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xvb6z\" (UniqueName: \"kubernetes.io/projected/e179d8c8-2f07-4b5c-9db5-c26877356f86-kube-api-access-xvb6z\") pod \"e179d8c8-2f07-4b5c-9db5-c26877356f86\" (UID: \"e179d8c8-2f07-4b5c-9db5-c26877356f86\") " Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.756688 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"e179d8c8-2f07-4b5c-9db5-c26877356f86\" (UID: \"e179d8c8-2f07-4b5c-9db5-c26877356f86\") " Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.756754 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e179d8c8-2f07-4b5c-9db5-c26877356f86-scripts\") pod \"e179d8c8-2f07-4b5c-9db5-c26877356f86\" (UID: \"e179d8c8-2f07-4b5c-9db5-c26877356f86\") " Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.757121 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e179d8c8-2f07-4b5c-9db5-c26877356f86-logs" (OuterVolumeSpecName: "logs") pod "e179d8c8-2f07-4b5c-9db5-c26877356f86" (UID: "e179d8c8-2f07-4b5c-9db5-c26877356f86"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.757686 4861 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e179d8c8-2f07-4b5c-9db5-c26877356f86-logs\") on node \"crc\" DevicePath \"\"" Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.758081 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e179d8c8-2f07-4b5c-9db5-c26877356f86-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "e179d8c8-2f07-4b5c-9db5-c26877356f86" (UID: "e179d8c8-2f07-4b5c-9db5-c26877356f86"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.763069 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage02-crc" (OuterVolumeSpecName: "glance") pod "e179d8c8-2f07-4b5c-9db5-c26877356f86" (UID: "e179d8c8-2f07-4b5c-9db5-c26877356f86"). InnerVolumeSpecName "local-storage02-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.765665 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e179d8c8-2f07-4b5c-9db5-c26877356f86-scripts" (OuterVolumeSpecName: "scripts") pod "e179d8c8-2f07-4b5c-9db5-c26877356f86" (UID: "e179d8c8-2f07-4b5c-9db5-c26877356f86"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.766021 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e179d8c8-2f07-4b5c-9db5-c26877356f86-kube-api-access-xvb6z" (OuterVolumeSpecName: "kube-api-access-xvb6z") pod "e179d8c8-2f07-4b5c-9db5-c26877356f86" (UID: "e179d8c8-2f07-4b5c-9db5-c26877356f86"). InnerVolumeSpecName "kube-api-access-xvb6z". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.803702 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.863058 4861 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e179d8c8-2f07-4b5c-9db5-c26877356f86-httpd-run\") on node \"crc\" DevicePath \"\"" Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.863091 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xvb6z\" (UniqueName: \"kubernetes.io/projected/e179d8c8-2f07-4b5c-9db5-c26877356f86-kube-api-access-xvb6z\") on node \"crc\" DevicePath \"\"" Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.863123 4861 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" " Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.863131 4861 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e179d8c8-2f07-4b5c-9db5-c26877356f86-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.892323 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e179d8c8-2f07-4b5c-9db5-c26877356f86-config-data" (OuterVolumeSpecName: "config-data") pod "e179d8c8-2f07-4b5c-9db5-c26877356f86" (UID: "e179d8c8-2f07-4b5c-9db5-c26877356f86"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.892386 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.895371 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e179d8c8-2f07-4b5c-9db5-c26877356f86-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e179d8c8-2f07-4b5c-9db5-c26877356f86" (UID: "e179d8c8-2f07-4b5c-9db5-c26877356f86"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.926338 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.958545 4861 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage02-crc" (UniqueName: "kubernetes.io/local-volume/local-storage02-crc") on node "crc" Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.964379 4861 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e179d8c8-2f07-4b5c-9db5-c26877356f86-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.964406 4861 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e179d8c8-2f07-4b5c-9db5-c26877356f86-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.964416 4861 reconciler_common.go:293] "Volume detached for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" DevicePath \"\"" Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.985450 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 03 13:52:10 crc kubenswrapper[4861]: E1003 13:52:10.985903 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e179d8c8-2f07-4b5c-9db5-c26877356f86" containerName="glance-httpd" Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.985916 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="e179d8c8-2f07-4b5c-9db5-c26877356f86" containerName="glance-httpd" Oct 03 13:52:10 crc kubenswrapper[4861]: E1003 13:52:10.985935 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e179d8c8-2f07-4b5c-9db5-c26877356f86" containerName="glance-log" Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.985941 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="e179d8c8-2f07-4b5c-9db5-c26877356f86" containerName="glance-log" Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.986083 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="e179d8c8-2f07-4b5c-9db5-c26877356f86" containerName="glance-httpd" Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.986112 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="e179d8c8-2f07-4b5c-9db5-c26877356f86" containerName="glance-log" Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.986447 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e179d8c8-2f07-4b5c-9db5-c26877356f86-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "e179d8c8-2f07-4b5c-9db5-c26877356f86" (UID: "e179d8c8-2f07-4b5c-9db5-c26877356f86"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.987676 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.990405 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.990921 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 03 13:52:10 crc kubenswrapper[4861]: I1003 13:52:10.998484 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 03 13:52:11 crc kubenswrapper[4861]: I1003 13:52:11.067192 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/451416fc-3410-4c91-b25c-1ab949a84fd5-scripts\") pod \"ceilometer-0\" (UID: \"451416fc-3410-4c91-b25c-1ab949a84fd5\") " pod="openstack/ceilometer-0" Oct 03 13:52:11 crc kubenswrapper[4861]: I1003 13:52:11.067291 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/451416fc-3410-4c91-b25c-1ab949a84fd5-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"451416fc-3410-4c91-b25c-1ab949a84fd5\") " pod="openstack/ceilometer-0" Oct 03 13:52:11 crc kubenswrapper[4861]: I1003 13:52:11.067328 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/451416fc-3410-4c91-b25c-1ab949a84fd5-run-httpd\") pod \"ceilometer-0\" (UID: \"451416fc-3410-4c91-b25c-1ab949a84fd5\") " pod="openstack/ceilometer-0" Oct 03 13:52:11 crc kubenswrapper[4861]: I1003 13:52:11.067369 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/451416fc-3410-4c91-b25c-1ab949a84fd5-config-data\") pod \"ceilometer-0\" (UID: \"451416fc-3410-4c91-b25c-1ab949a84fd5\") " pod="openstack/ceilometer-0" Oct 03 13:52:11 crc kubenswrapper[4861]: I1003 13:52:11.067394 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/451416fc-3410-4c91-b25c-1ab949a84fd5-log-httpd\") pod \"ceilometer-0\" (UID: \"451416fc-3410-4c91-b25c-1ab949a84fd5\") " pod="openstack/ceilometer-0" Oct 03 13:52:11 crc kubenswrapper[4861]: I1003 13:52:11.067440 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/451416fc-3410-4c91-b25c-1ab949a84fd5-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"451416fc-3410-4c91-b25c-1ab949a84fd5\") " pod="openstack/ceilometer-0" Oct 03 13:52:11 crc kubenswrapper[4861]: I1003 13:52:11.067486 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x7rzn\" (UniqueName: \"kubernetes.io/projected/451416fc-3410-4c91-b25c-1ab949a84fd5-kube-api-access-x7rzn\") pod \"ceilometer-0\" (UID: \"451416fc-3410-4c91-b25c-1ab949a84fd5\") " pod="openstack/ceilometer-0" Oct 03 13:52:11 crc kubenswrapper[4861]: I1003 13:52:11.067564 4861 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e179d8c8-2f07-4b5c-9db5-c26877356f86-public-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 03 13:52:11 crc kubenswrapper[4861]: I1003 13:52:11.169219 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"scripts\" (UniqueName: \"kubernetes.io/secret/451416fc-3410-4c91-b25c-1ab949a84fd5-scripts\") pod \"ceilometer-0\" (UID: \"451416fc-3410-4c91-b25c-1ab949a84fd5\") " pod="openstack/ceilometer-0" Oct 03 13:52:11 crc kubenswrapper[4861]: I1003 13:52:11.169296 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/451416fc-3410-4c91-b25c-1ab949a84fd5-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"451416fc-3410-4c91-b25c-1ab949a84fd5\") " pod="openstack/ceilometer-0" Oct 03 13:52:11 crc kubenswrapper[4861]: I1003 13:52:11.169319 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/451416fc-3410-4c91-b25c-1ab949a84fd5-run-httpd\") pod \"ceilometer-0\" (UID: \"451416fc-3410-4c91-b25c-1ab949a84fd5\") " pod="openstack/ceilometer-0" Oct 03 13:52:11 crc kubenswrapper[4861]: I1003 13:52:11.169350 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/451416fc-3410-4c91-b25c-1ab949a84fd5-config-data\") pod \"ceilometer-0\" (UID: \"451416fc-3410-4c91-b25c-1ab949a84fd5\") " pod="openstack/ceilometer-0" Oct 03 13:52:11 crc kubenswrapper[4861]: I1003 13:52:11.169368 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/451416fc-3410-4c91-b25c-1ab949a84fd5-log-httpd\") pod \"ceilometer-0\" (UID: \"451416fc-3410-4c91-b25c-1ab949a84fd5\") " pod="openstack/ceilometer-0" Oct 03 13:52:11 crc kubenswrapper[4861]: I1003 13:52:11.169399 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/451416fc-3410-4c91-b25c-1ab949a84fd5-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"451416fc-3410-4c91-b25c-1ab949a84fd5\") " pod="openstack/ceilometer-0" Oct 03 13:52:11 crc kubenswrapper[4861]: I1003 13:52:11.169438 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x7rzn\" (UniqueName: \"kubernetes.io/projected/451416fc-3410-4c91-b25c-1ab949a84fd5-kube-api-access-x7rzn\") pod \"ceilometer-0\" (UID: \"451416fc-3410-4c91-b25c-1ab949a84fd5\") " pod="openstack/ceilometer-0" Oct 03 13:52:11 crc kubenswrapper[4861]: I1003 13:52:11.172009 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/451416fc-3410-4c91-b25c-1ab949a84fd5-log-httpd\") pod \"ceilometer-0\" (UID: \"451416fc-3410-4c91-b25c-1ab949a84fd5\") " pod="openstack/ceilometer-0" Oct 03 13:52:11 crc kubenswrapper[4861]: I1003 13:52:11.172468 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/451416fc-3410-4c91-b25c-1ab949a84fd5-run-httpd\") pod \"ceilometer-0\" (UID: \"451416fc-3410-4c91-b25c-1ab949a84fd5\") " pod="openstack/ceilometer-0" Oct 03 13:52:11 crc kubenswrapper[4861]: I1003 13:52:11.175538 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/451416fc-3410-4c91-b25c-1ab949a84fd5-scripts\") pod \"ceilometer-0\" (UID: \"451416fc-3410-4c91-b25c-1ab949a84fd5\") " pod="openstack/ceilometer-0" Oct 03 13:52:11 crc kubenswrapper[4861]: I1003 13:52:11.175595 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/451416fc-3410-4c91-b25c-1ab949a84fd5-config-data\") pod \"ceilometer-0\" (UID: \"451416fc-3410-4c91-b25c-1ab949a84fd5\") " pod="openstack/ceilometer-0" Oct 03 13:52:11 crc kubenswrapper[4861]: I1003 13:52:11.178957 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/451416fc-3410-4c91-b25c-1ab949a84fd5-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"451416fc-3410-4c91-b25c-1ab949a84fd5\") " pod="openstack/ceilometer-0" Oct 03 13:52:11 crc kubenswrapper[4861]: I1003 13:52:11.187355 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/451416fc-3410-4c91-b25c-1ab949a84fd5-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"451416fc-3410-4c91-b25c-1ab949a84fd5\") " pod="openstack/ceilometer-0" Oct 03 13:52:11 crc kubenswrapper[4861]: I1003 13:52:11.194883 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x7rzn\" (UniqueName: \"kubernetes.io/projected/451416fc-3410-4c91-b25c-1ab949a84fd5-kube-api-access-x7rzn\") pod \"ceilometer-0\" (UID: \"451416fc-3410-4c91-b25c-1ab949a84fd5\") " pod="openstack/ceilometer-0" Oct 03 13:52:11 crc kubenswrapper[4861]: I1003 13:52:11.290118 4861 generic.go:334] "Generic (PLEG): container finished" podID="de3aa019-3497-44f8-a749-0f04313edd2a" containerID="07c2af27ff886eadebdfa623c717fae4a04ebd63f2adadb301c39c3b5f1a0cc5" exitCode=0 Oct 03 13:52:11 crc kubenswrapper[4861]: I1003 13:52:11.290518 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-nrf7f" event={"ID":"de3aa019-3497-44f8-a749-0f04313edd2a","Type":"ContainerDied","Data":"07c2af27ff886eadebdfa623c717fae4a04ebd63f2adadb301c39c3b5f1a0cc5"} Oct 03 13:52:11 crc kubenswrapper[4861]: I1003 13:52:11.290555 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-nrf7f" event={"ID":"de3aa019-3497-44f8-a749-0f04313edd2a","Type":"ContainerStarted","Data":"6ce6fdbf69057dcbc7f23b80fb2673e8291659e4d7aaecbb600b90156382f725"} Oct 03 13:52:11 crc kubenswrapper[4861]: I1003 13:52:11.324100 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 03 13:52:11 crc kubenswrapper[4861]: I1003 13:52:11.330417 4861 generic.go:334] "Generic (PLEG): container finished" podID="b99e855f-4dbf-498a-b942-e22c86d392d2" containerID="56a9a18bc568ddb1e6cac074a993e40e90955d696853240f40c9a7c93d0aadf9" exitCode=0 Oct 03 13:52:11 crc kubenswrapper[4861]: I1003 13:52:11.331348 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-mvb4f" event={"ID":"b99e855f-4dbf-498a-b942-e22c86d392d2","Type":"ContainerDied","Data":"56a9a18bc568ddb1e6cac074a993e40e90955d696853240f40c9a7c93d0aadf9"} Oct 03 13:52:11 crc kubenswrapper[4861]: I1003 13:52:11.331378 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-mvb4f" event={"ID":"b99e855f-4dbf-498a-b942-e22c86d392d2","Type":"ContainerStarted","Data":"7bdc2db473c1b5df40faee25c993314c7685de54a8dedec022ccc779c0ec4083"} Oct 03 13:52:11 crc kubenswrapper[4861]: I1003 13:52:11.362332 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 03 13:52:11 crc kubenswrapper[4861]: I1003 13:52:11.362381 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"e179d8c8-2f07-4b5c-9db5-c26877356f86","Type":"ContainerDied","Data":"c5d9d53cbf341d79cff5c9fde80795a706d915ed837b35de0445be9a3563a4cd"} Oct 03 13:52:11 crc kubenswrapper[4861]: I1003 13:52:11.362429 4861 scope.go:117] "RemoveContainer" containerID="8b5224a7e6863ed712c6aa8d296714089aaa6d9c8931092c695fc9b013d993dc" Oct 03 13:52:11 crc kubenswrapper[4861]: I1003 13:52:11.375768 4861 generic.go:334] "Generic (PLEG): container finished" podID="b235907f-2bbf-4402-ac15-7c38c44a7461" containerID="344501de3bd1fcb02bbd1237b09f8d94d45792e9b54462601567e5eb4cdab6dd" exitCode=0 Oct 03 13:52:11 crc kubenswrapper[4861]: I1003 13:52:11.375869 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"b235907f-2bbf-4402-ac15-7c38c44a7461","Type":"ContainerDied","Data":"344501de3bd1fcb02bbd1237b09f8d94d45792e9b54462601567e5eb4cdab6dd"} Oct 03 13:52:11 crc kubenswrapper[4861]: I1003 13:52:11.404417 4861 generic.go:334] "Generic (PLEG): container finished" podID="741f5e11-d2d0-405c-9826-929d9b2b072b" containerID="7dfa1e5dcb91520110a5b70f91b06059bef2e15ab26dd659ecb77af53e0f9f15" exitCode=0 Oct 03 13:52:11 crc kubenswrapper[4861]: I1003 13:52:11.405211 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-vwnhz" event={"ID":"741f5e11-d2d0-405c-9826-929d9b2b072b","Type":"ContainerDied","Data":"7dfa1e5dcb91520110a5b70f91b06059bef2e15ab26dd659ecb77af53e0f9f15"} Oct 03 13:52:11 crc kubenswrapper[4861]: I1003 13:52:11.453737 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 03 13:52:11 crc kubenswrapper[4861]: I1003 13:52:11.506823 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 03 13:52:11 crc kubenswrapper[4861]: I1003 13:52:11.533113 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Oct 03 13:52:11 crc kubenswrapper[4861]: I1003 13:52:11.534850 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 03 13:52:11 crc kubenswrapper[4861]: I1003 13:52:11.537620 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Oct 03 13:52:11 crc kubenswrapper[4861]: I1003 13:52:11.537906 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Oct 03 13:52:11 crc kubenswrapper[4861]: I1003 13:52:11.554578 4861 scope.go:117] "RemoveContainer" containerID="a55d2cf51b6106dad51dd75123df347fa3f197c41aedc1bac6fa6e18ca480564" Oct 03 13:52:11 crc kubenswrapper[4861]: I1003 13:52:11.589462 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 03 13:52:11 crc kubenswrapper[4861]: I1003 13:52:11.600697 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Oct 03 13:52:11 crc kubenswrapper[4861]: I1003 13:52:11.702545 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3f0d752d-7682-4244-9682-bf78e9a9d8ec-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"3f0d752d-7682-4244-9682-bf78e9a9d8ec\") " pod="openstack/glance-default-external-api-0" Oct 03 13:52:11 crc kubenswrapper[4861]: I1003 13:52:11.702902 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-external-api-0\" (UID: \"3f0d752d-7682-4244-9682-bf78e9a9d8ec\") " pod="openstack/glance-default-external-api-0" Oct 03 13:52:11 crc kubenswrapper[4861]: I1003 13:52:11.702940 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3f0d752d-7682-4244-9682-bf78e9a9d8ec-logs\") pod \"glance-default-external-api-0\" (UID: \"3f0d752d-7682-4244-9682-bf78e9a9d8ec\") " pod="openstack/glance-default-external-api-0" Oct 03 13:52:11 crc kubenswrapper[4861]: I1003 13:52:11.702995 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3f0d752d-7682-4244-9682-bf78e9a9d8ec-scripts\") pod \"glance-default-external-api-0\" (UID: \"3f0d752d-7682-4244-9682-bf78e9a9d8ec\") " pod="openstack/glance-default-external-api-0" Oct 03 13:52:11 crc kubenswrapper[4861]: I1003 13:52:11.703041 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f0d752d-7682-4244-9682-bf78e9a9d8ec-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"3f0d752d-7682-4244-9682-bf78e9a9d8ec\") " pod="openstack/glance-default-external-api-0" Oct 03 13:52:11 crc kubenswrapper[4861]: I1003 13:52:11.703079 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3f0d752d-7682-4244-9682-bf78e9a9d8ec-config-data\") pod \"glance-default-external-api-0\" (UID: \"3f0d752d-7682-4244-9682-bf78e9a9d8ec\") " pod="openstack/glance-default-external-api-0" Oct 03 13:52:11 crc kubenswrapper[4861]: I1003 13:52:11.703117 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z8tvq\" (UniqueName: 
\"kubernetes.io/projected/3f0d752d-7682-4244-9682-bf78e9a9d8ec-kube-api-access-z8tvq\") pod \"glance-default-external-api-0\" (UID: \"3f0d752d-7682-4244-9682-bf78e9a9d8ec\") " pod="openstack/glance-default-external-api-0" Oct 03 13:52:11 crc kubenswrapper[4861]: I1003 13:52:11.703144 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/3f0d752d-7682-4244-9682-bf78e9a9d8ec-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"3f0d752d-7682-4244-9682-bf78e9a9d8ec\") " pod="openstack/glance-default-external-api-0" Oct 03 13:52:11 crc kubenswrapper[4861]: I1003 13:52:11.738485 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 03 13:52:11 crc kubenswrapper[4861]: I1003 13:52:11.804815 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b235907f-2bbf-4402-ac15-7c38c44a7461-httpd-run\") pod \"b235907f-2bbf-4402-ac15-7c38c44a7461\" (UID: \"b235907f-2bbf-4402-ac15-7c38c44a7461\") " Oct 03 13:52:11 crc kubenswrapper[4861]: I1003 13:52:11.804865 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b235907f-2bbf-4402-ac15-7c38c44a7461-scripts\") pod \"b235907f-2bbf-4402-ac15-7c38c44a7461\" (UID: \"b235907f-2bbf-4402-ac15-7c38c44a7461\") " Oct 03 13:52:11 crc kubenswrapper[4861]: I1003 13:52:11.804929 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b235907f-2bbf-4402-ac15-7c38c44a7461-internal-tls-certs\") pod \"b235907f-2bbf-4402-ac15-7c38c44a7461\" (UID: \"b235907f-2bbf-4402-ac15-7c38c44a7461\") " Oct 03 13:52:11 crc kubenswrapper[4861]: I1003 13:52:11.804980 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-prfxf\" (UniqueName: \"kubernetes.io/projected/b235907f-2bbf-4402-ac15-7c38c44a7461-kube-api-access-prfxf\") pod \"b235907f-2bbf-4402-ac15-7c38c44a7461\" (UID: \"b235907f-2bbf-4402-ac15-7c38c44a7461\") " Oct 03 13:52:11 crc kubenswrapper[4861]: I1003 13:52:11.805491 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"b235907f-2bbf-4402-ac15-7c38c44a7461\" (UID: \"b235907f-2bbf-4402-ac15-7c38c44a7461\") " Oct 03 13:52:11 crc kubenswrapper[4861]: I1003 13:52:11.805565 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b235907f-2bbf-4402-ac15-7c38c44a7461-logs\") pod \"b235907f-2bbf-4402-ac15-7c38c44a7461\" (UID: \"b235907f-2bbf-4402-ac15-7c38c44a7461\") " Oct 03 13:52:11 crc kubenswrapper[4861]: I1003 13:52:11.805597 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b235907f-2bbf-4402-ac15-7c38c44a7461-combined-ca-bundle\") pod \"b235907f-2bbf-4402-ac15-7c38c44a7461\" (UID: \"b235907f-2bbf-4402-ac15-7c38c44a7461\") " Oct 03 13:52:11 crc kubenswrapper[4861]: I1003 13:52:11.805619 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b235907f-2bbf-4402-ac15-7c38c44a7461-config-data\") pod \"b235907f-2bbf-4402-ac15-7c38c44a7461\" (UID: 
\"b235907f-2bbf-4402-ac15-7c38c44a7461\") " Oct 03 13:52:11 crc kubenswrapper[4861]: I1003 13:52:11.805817 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f0d752d-7682-4244-9682-bf78e9a9d8ec-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"3f0d752d-7682-4244-9682-bf78e9a9d8ec\") " pod="openstack/glance-default-external-api-0" Oct 03 13:52:11 crc kubenswrapper[4861]: I1003 13:52:11.805862 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3f0d752d-7682-4244-9682-bf78e9a9d8ec-config-data\") pod \"glance-default-external-api-0\" (UID: \"3f0d752d-7682-4244-9682-bf78e9a9d8ec\") " pod="openstack/glance-default-external-api-0" Oct 03 13:52:11 crc kubenswrapper[4861]: I1003 13:52:11.805858 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b235907f-2bbf-4402-ac15-7c38c44a7461-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "b235907f-2bbf-4402-ac15-7c38c44a7461" (UID: "b235907f-2bbf-4402-ac15-7c38c44a7461"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:52:11 crc kubenswrapper[4861]: I1003 13:52:11.805921 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z8tvq\" (UniqueName: \"kubernetes.io/projected/3f0d752d-7682-4244-9682-bf78e9a9d8ec-kube-api-access-z8tvq\") pod \"glance-default-external-api-0\" (UID: \"3f0d752d-7682-4244-9682-bf78e9a9d8ec\") " pod="openstack/glance-default-external-api-0" Oct 03 13:52:11 crc kubenswrapper[4861]: I1003 13:52:11.805946 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/3f0d752d-7682-4244-9682-bf78e9a9d8ec-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"3f0d752d-7682-4244-9682-bf78e9a9d8ec\") " pod="openstack/glance-default-external-api-0" Oct 03 13:52:11 crc kubenswrapper[4861]: I1003 13:52:11.805983 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3f0d752d-7682-4244-9682-bf78e9a9d8ec-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"3f0d752d-7682-4244-9682-bf78e9a9d8ec\") " pod="openstack/glance-default-external-api-0" Oct 03 13:52:11 crc kubenswrapper[4861]: I1003 13:52:11.806029 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-external-api-0\" (UID: \"3f0d752d-7682-4244-9682-bf78e9a9d8ec\") " pod="openstack/glance-default-external-api-0" Oct 03 13:52:11 crc kubenswrapper[4861]: I1003 13:52:11.806058 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3f0d752d-7682-4244-9682-bf78e9a9d8ec-logs\") pod \"glance-default-external-api-0\" (UID: \"3f0d752d-7682-4244-9682-bf78e9a9d8ec\") " pod="openstack/glance-default-external-api-0" Oct 03 13:52:11 crc kubenswrapper[4861]: I1003 13:52:11.806108 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3f0d752d-7682-4244-9682-bf78e9a9d8ec-scripts\") pod \"glance-default-external-api-0\" (UID: \"3f0d752d-7682-4244-9682-bf78e9a9d8ec\") " pod="openstack/glance-default-external-api-0" Oct 03 13:52:11 
crc kubenswrapper[4861]: I1003 13:52:11.806166 4861 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b235907f-2bbf-4402-ac15-7c38c44a7461-httpd-run\") on node \"crc\" DevicePath \"\"" Oct 03 13:52:11 crc kubenswrapper[4861]: I1003 13:52:11.811653 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage09-crc" (OuterVolumeSpecName: "glance") pod "b235907f-2bbf-4402-ac15-7c38c44a7461" (UID: "b235907f-2bbf-4402-ac15-7c38c44a7461"). InnerVolumeSpecName "local-storage09-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 03 13:52:11 crc kubenswrapper[4861]: I1003 13:52:11.811794 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b235907f-2bbf-4402-ac15-7c38c44a7461-logs" (OuterVolumeSpecName: "logs") pod "b235907f-2bbf-4402-ac15-7c38c44a7461" (UID: "b235907f-2bbf-4402-ac15-7c38c44a7461"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:52:11 crc kubenswrapper[4861]: I1003 13:52:11.812051 4861 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-external-api-0\" (UID: \"3f0d752d-7682-4244-9682-bf78e9a9d8ec\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/glance-default-external-api-0" Oct 03 13:52:11 crc kubenswrapper[4861]: I1003 13:52:11.813171 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b235907f-2bbf-4402-ac15-7c38c44a7461-scripts" (OuterVolumeSpecName: "scripts") pod "b235907f-2bbf-4402-ac15-7c38c44a7461" (UID: "b235907f-2bbf-4402-ac15-7c38c44a7461"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:52:11 crc kubenswrapper[4861]: I1003 13:52:11.820513 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b235907f-2bbf-4402-ac15-7c38c44a7461-kube-api-access-prfxf" (OuterVolumeSpecName: "kube-api-access-prfxf") pod "b235907f-2bbf-4402-ac15-7c38c44a7461" (UID: "b235907f-2bbf-4402-ac15-7c38c44a7461"). InnerVolumeSpecName "kube-api-access-prfxf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:52:11 crc kubenswrapper[4861]: I1003 13:52:11.825423 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/3f0d752d-7682-4244-9682-bf78e9a9d8ec-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"3f0d752d-7682-4244-9682-bf78e9a9d8ec\") " pod="openstack/glance-default-external-api-0" Oct 03 13:52:11 crc kubenswrapper[4861]: I1003 13:52:11.825840 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3f0d752d-7682-4244-9682-bf78e9a9d8ec-logs\") pod \"glance-default-external-api-0\" (UID: \"3f0d752d-7682-4244-9682-bf78e9a9d8ec\") " pod="openstack/glance-default-external-api-0" Oct 03 13:52:11 crc kubenswrapper[4861]: I1003 13:52:11.838443 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f0d752d-7682-4244-9682-bf78e9a9d8ec-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"3f0d752d-7682-4244-9682-bf78e9a9d8ec\") " pod="openstack/glance-default-external-api-0" Oct 03 13:52:11 crc kubenswrapper[4861]: I1003 13:52:11.839341 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3f0d752d-7682-4244-9682-bf78e9a9d8ec-config-data\") pod \"glance-default-external-api-0\" (UID: \"3f0d752d-7682-4244-9682-bf78e9a9d8ec\") " pod="openstack/glance-default-external-api-0" Oct 03 13:52:11 crc kubenswrapper[4861]: I1003 13:52:11.864906 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3f0d752d-7682-4244-9682-bf78e9a9d8ec-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"3f0d752d-7682-4244-9682-bf78e9a9d8ec\") " pod="openstack/glance-default-external-api-0" Oct 03 13:52:11 crc kubenswrapper[4861]: I1003 13:52:11.868828 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3f0d752d-7682-4244-9682-bf78e9a9d8ec-scripts\") pod \"glance-default-external-api-0\" (UID: \"3f0d752d-7682-4244-9682-bf78e9a9d8ec\") " pod="openstack/glance-default-external-api-0" Oct 03 13:52:11 crc kubenswrapper[4861]: I1003 13:52:11.889341 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z8tvq\" (UniqueName: \"kubernetes.io/projected/3f0d752d-7682-4244-9682-bf78e9a9d8ec-kube-api-access-z8tvq\") pod \"glance-default-external-api-0\" (UID: \"3f0d752d-7682-4244-9682-bf78e9a9d8ec\") " pod="openstack/glance-default-external-api-0" Oct 03 13:52:11 crc kubenswrapper[4861]: I1003 13:52:11.897647 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-external-api-0\" (UID: \"3f0d752d-7682-4244-9682-bf78e9a9d8ec\") " pod="openstack/glance-default-external-api-0" Oct 03 13:52:11 crc kubenswrapper[4861]: I1003 13:52:11.908474 4861 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b235907f-2bbf-4402-ac15-7c38c44a7461-logs\") on node \"crc\" DevicePath \"\"" Oct 03 13:52:11 crc kubenswrapper[4861]: I1003 13:52:11.908512 4861 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b235907f-2bbf-4402-ac15-7c38c44a7461-scripts\") on node \"crc\" DevicePath 
\"\"" Oct 03 13:52:11 crc kubenswrapper[4861]: I1003 13:52:11.908527 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-prfxf\" (UniqueName: \"kubernetes.io/projected/b235907f-2bbf-4402-ac15-7c38c44a7461-kube-api-access-prfxf\") on node \"crc\" DevicePath \"\"" Oct 03 13:52:11 crc kubenswrapper[4861]: I1003 13:52:11.908555 4861 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" " Oct 03 13:52:11 crc kubenswrapper[4861]: I1003 13:52:11.938414 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b235907f-2bbf-4402-ac15-7c38c44a7461-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b235907f-2bbf-4402-ac15-7c38c44a7461" (UID: "b235907f-2bbf-4402-ac15-7c38c44a7461"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:52:11 crc kubenswrapper[4861]: I1003 13:52:11.951520 4861 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage09-crc" (UniqueName: "kubernetes.io/local-volume/local-storage09-crc") on node "crc" Oct 03 13:52:11 crc kubenswrapper[4861]: I1003 13:52:11.958437 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b235907f-2bbf-4402-ac15-7c38c44a7461-config-data" (OuterVolumeSpecName: "config-data") pod "b235907f-2bbf-4402-ac15-7c38c44a7461" (UID: "b235907f-2bbf-4402-ac15-7c38c44a7461"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:52:12 crc kubenswrapper[4861]: I1003 13:52:12.007487 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b235907f-2bbf-4402-ac15-7c38c44a7461-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "b235907f-2bbf-4402-ac15-7c38c44a7461" (UID: "b235907f-2bbf-4402-ac15-7c38c44a7461"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:52:12 crc kubenswrapper[4861]: I1003 13:52:12.012843 4861 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b235907f-2bbf-4402-ac15-7c38c44a7461-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 13:52:12 crc kubenswrapper[4861]: I1003 13:52:12.012873 4861 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b235907f-2bbf-4402-ac15-7c38c44a7461-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 13:52:12 crc kubenswrapper[4861]: I1003 13:52:12.012883 4861 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b235907f-2bbf-4402-ac15-7c38c44a7461-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 03 13:52:12 crc kubenswrapper[4861]: I1003 13:52:12.012891 4861 reconciler_common.go:293] "Volume detached for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" DevicePath \"\"" Oct 03 13:52:12 crc kubenswrapper[4861]: I1003 13:52:12.084031 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 03 13:52:12 crc kubenswrapper[4861]: I1003 13:52:12.176903 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 03 13:52:12 crc kubenswrapper[4861]: I1003 13:52:12.457866 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"de8f27cb-a40f-4ab0-b709-4abad3ff72bb","Type":"ContainerStarted","Data":"b677638c3684d9b8d5ae3eb9cced5ebfd76d4ff605904dd0dee9394cfa93293d"} Oct 03 13:52:12 crc kubenswrapper[4861]: I1003 13:52:12.459827 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"b235907f-2bbf-4402-ac15-7c38c44a7461","Type":"ContainerDied","Data":"d85a364c6a7602d2789ae153b62171ec025beeadc66ed740e50aeb6cd3419de2"} Oct 03 13:52:12 crc kubenswrapper[4861]: I1003 13:52:12.459862 4861 scope.go:117] "RemoveContainer" containerID="344501de3bd1fcb02bbd1237b09f8d94d45792e9b54462601567e5eb4cdab6dd" Oct 03 13:52:12 crc kubenswrapper[4861]: I1003 13:52:12.459882 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 03 13:52:12 crc kubenswrapper[4861]: I1003 13:52:12.469939 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"451416fc-3410-4c91-b25c-1ab949a84fd5","Type":"ContainerStarted","Data":"9e47c8042134ff47073f2b035441462240d9cfe06fa1bc4d667a22ce81e06ac5"} Oct 03 13:52:12 crc kubenswrapper[4861]: I1003 13:52:12.528553 4861 scope.go:117] "RemoveContainer" containerID="31943b45a956b02a6ab0773b2a3fe2171422f6989c6c5f028fa28e27430e1e0f" Oct 03 13:52:12 crc kubenswrapper[4861]: I1003 13:52:12.535133 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 03 13:52:12 crc kubenswrapper[4861]: I1003 13:52:12.550997 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 03 13:52:12 crc kubenswrapper[4861]: I1003 13:52:12.576607 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 03 13:52:12 crc kubenswrapper[4861]: E1003 13:52:12.577139 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b235907f-2bbf-4402-ac15-7c38c44a7461" containerName="glance-httpd" Oct 03 13:52:12 crc kubenswrapper[4861]: I1003 13:52:12.577154 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="b235907f-2bbf-4402-ac15-7c38c44a7461" containerName="glance-httpd" Oct 03 13:52:12 crc kubenswrapper[4861]: E1003 13:52:12.577182 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b235907f-2bbf-4402-ac15-7c38c44a7461" containerName="glance-log" Oct 03 13:52:12 crc kubenswrapper[4861]: I1003 13:52:12.577189 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="b235907f-2bbf-4402-ac15-7c38c44a7461" containerName="glance-log" Oct 03 13:52:12 crc kubenswrapper[4861]: I1003 13:52:12.577428 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="b235907f-2bbf-4402-ac15-7c38c44a7461" containerName="glance-httpd" Oct 03 13:52:12 crc kubenswrapper[4861]: I1003 13:52:12.577446 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="b235907f-2bbf-4402-ac15-7c38c44a7461" containerName="glance-log" Oct 03 13:52:12 crc kubenswrapper[4861]: I1003 13:52:12.578635 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 03 13:52:12 crc kubenswrapper[4861]: I1003 13:52:12.583626 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Oct 03 13:52:12 crc kubenswrapper[4861]: I1003 13:52:12.583844 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Oct 03 13:52:12 crc kubenswrapper[4861]: I1003 13:52:12.593460 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 03 13:52:12 crc kubenswrapper[4861]: I1003 13:52:12.697074 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3413d18c-4d35-4962-bc3f-b6750860b13d" path="/var/lib/kubelet/pods/3413d18c-4d35-4962-bc3f-b6750860b13d/volumes" Oct 03 13:52:12 crc kubenswrapper[4861]: I1003 13:52:12.698744 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b235907f-2bbf-4402-ac15-7c38c44a7461" path="/var/lib/kubelet/pods/b235907f-2bbf-4402-ac15-7c38c44a7461/volumes" Oct 03 13:52:12 crc kubenswrapper[4861]: I1003 13:52:12.700393 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e179d8c8-2f07-4b5c-9db5-c26877356f86" path="/var/lib/kubelet/pods/e179d8c8-2f07-4b5c-9db5-c26877356f86/volumes" Oct 03 13:52:12 crc kubenswrapper[4861]: I1003 13:52:12.730161 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6b86d\" (UniqueName: \"kubernetes.io/projected/a2693b41-01f0-48e9-b551-fa6c48d29531-kube-api-access-6b86d\") pod \"glance-default-internal-api-0\" (UID: \"a2693b41-01f0-48e9-b551-fa6c48d29531\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:52:12 crc kubenswrapper[4861]: I1003 13:52:12.730258 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-internal-api-0\" (UID: \"a2693b41-01f0-48e9-b551-fa6c48d29531\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:52:12 crc kubenswrapper[4861]: I1003 13:52:12.730308 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a2693b41-01f0-48e9-b551-fa6c48d29531-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"a2693b41-01f0-48e9-b551-fa6c48d29531\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:52:12 crc kubenswrapper[4861]: I1003 13:52:12.730327 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a2693b41-01f0-48e9-b551-fa6c48d29531-scripts\") pod \"glance-default-internal-api-0\" (UID: \"a2693b41-01f0-48e9-b551-fa6c48d29531\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:52:12 crc kubenswrapper[4861]: I1003 13:52:12.730352 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a2693b41-01f0-48e9-b551-fa6c48d29531-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"a2693b41-01f0-48e9-b551-fa6c48d29531\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:52:12 crc kubenswrapper[4861]: I1003 13:52:12.730380 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" 
(UniqueName: \"kubernetes.io/secret/a2693b41-01f0-48e9-b551-fa6c48d29531-config-data\") pod \"glance-default-internal-api-0\" (UID: \"a2693b41-01f0-48e9-b551-fa6c48d29531\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:52:12 crc kubenswrapper[4861]: I1003 13:52:12.730409 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a2693b41-01f0-48e9-b551-fa6c48d29531-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"a2693b41-01f0-48e9-b551-fa6c48d29531\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:52:12 crc kubenswrapper[4861]: I1003 13:52:12.730440 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a2693b41-01f0-48e9-b551-fa6c48d29531-logs\") pod \"glance-default-internal-api-0\" (UID: \"a2693b41-01f0-48e9-b551-fa6c48d29531\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:52:12 crc kubenswrapper[4861]: I1003 13:52:12.832270 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-internal-api-0\" (UID: \"a2693b41-01f0-48e9-b551-fa6c48d29531\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:52:12 crc kubenswrapper[4861]: I1003 13:52:12.832343 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a2693b41-01f0-48e9-b551-fa6c48d29531-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"a2693b41-01f0-48e9-b551-fa6c48d29531\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:52:12 crc kubenswrapper[4861]: I1003 13:52:12.832365 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a2693b41-01f0-48e9-b551-fa6c48d29531-scripts\") pod \"glance-default-internal-api-0\" (UID: \"a2693b41-01f0-48e9-b551-fa6c48d29531\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:52:12 crc kubenswrapper[4861]: I1003 13:52:12.832388 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a2693b41-01f0-48e9-b551-fa6c48d29531-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"a2693b41-01f0-48e9-b551-fa6c48d29531\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:52:12 crc kubenswrapper[4861]: I1003 13:52:12.832420 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a2693b41-01f0-48e9-b551-fa6c48d29531-config-data\") pod \"glance-default-internal-api-0\" (UID: \"a2693b41-01f0-48e9-b551-fa6c48d29531\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:52:12 crc kubenswrapper[4861]: I1003 13:52:12.832450 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a2693b41-01f0-48e9-b551-fa6c48d29531-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"a2693b41-01f0-48e9-b551-fa6c48d29531\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:52:12 crc kubenswrapper[4861]: I1003 13:52:12.832483 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a2693b41-01f0-48e9-b551-fa6c48d29531-logs\") pod 
\"glance-default-internal-api-0\" (UID: \"a2693b41-01f0-48e9-b551-fa6c48d29531\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:52:12 crc kubenswrapper[4861]: I1003 13:52:12.832518 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6b86d\" (UniqueName: \"kubernetes.io/projected/a2693b41-01f0-48e9-b551-fa6c48d29531-kube-api-access-6b86d\") pod \"glance-default-internal-api-0\" (UID: \"a2693b41-01f0-48e9-b551-fa6c48d29531\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:52:12 crc kubenswrapper[4861]: I1003 13:52:12.835501 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a2693b41-01f0-48e9-b551-fa6c48d29531-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"a2693b41-01f0-48e9-b551-fa6c48d29531\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:52:12 crc kubenswrapper[4861]: I1003 13:52:12.835835 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a2693b41-01f0-48e9-b551-fa6c48d29531-logs\") pod \"glance-default-internal-api-0\" (UID: \"a2693b41-01f0-48e9-b551-fa6c48d29531\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:52:12 crc kubenswrapper[4861]: I1003 13:52:12.836138 4861 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-internal-api-0\" (UID: \"a2693b41-01f0-48e9-b551-fa6c48d29531\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/glance-default-internal-api-0" Oct 03 13:52:12 crc kubenswrapper[4861]: I1003 13:52:12.861642 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6b86d\" (UniqueName: \"kubernetes.io/projected/a2693b41-01f0-48e9-b551-fa6c48d29531-kube-api-access-6b86d\") pod \"glance-default-internal-api-0\" (UID: \"a2693b41-01f0-48e9-b551-fa6c48d29531\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:52:12 crc kubenswrapper[4861]: I1003 13:52:12.866953 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a2693b41-01f0-48e9-b551-fa6c48d29531-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"a2693b41-01f0-48e9-b551-fa6c48d29531\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:52:12 crc kubenswrapper[4861]: I1003 13:52:12.867775 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a2693b41-01f0-48e9-b551-fa6c48d29531-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"a2693b41-01f0-48e9-b551-fa6c48d29531\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:52:12 crc kubenswrapper[4861]: I1003 13:52:12.877373 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a2693b41-01f0-48e9-b551-fa6c48d29531-config-data\") pod \"glance-default-internal-api-0\" (UID: \"a2693b41-01f0-48e9-b551-fa6c48d29531\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:52:12 crc kubenswrapper[4861]: I1003 13:52:12.888868 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a2693b41-01f0-48e9-b551-fa6c48d29531-scripts\") pod \"glance-default-internal-api-0\" (UID: \"a2693b41-01f0-48e9-b551-fa6c48d29531\") " 
pod="openstack/glance-default-internal-api-0" Oct 03 13:52:13 crc kubenswrapper[4861]: I1003 13:52:13.052977 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-internal-api-0\" (UID: \"a2693b41-01f0-48e9-b551-fa6c48d29531\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:52:13 crc kubenswrapper[4861]: I1003 13:52:13.125547 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 03 13:52:13 crc kubenswrapper[4861]: I1003 13:52:13.240591 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 03 13:52:13 crc kubenswrapper[4861]: I1003 13:52:13.404243 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-vwnhz" Oct 03 13:52:13 crc kubenswrapper[4861]: I1003 13:52:13.462200 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-mvb4f" Oct 03 13:52:13 crc kubenswrapper[4861]: I1003 13:52:13.483436 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-nrf7f" Oct 03 13:52:13 crc kubenswrapper[4861]: I1003 13:52:13.520506 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7f8kp\" (UniqueName: \"kubernetes.io/projected/741f5e11-d2d0-405c-9826-929d9b2b072b-kube-api-access-7f8kp\") pod \"741f5e11-d2d0-405c-9826-929d9b2b072b\" (UID: \"741f5e11-d2d0-405c-9826-929d9b2b072b\") " Oct 03 13:52:13 crc kubenswrapper[4861]: I1003 13:52:13.526485 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/741f5e11-d2d0-405c-9826-929d9b2b072b-kube-api-access-7f8kp" (OuterVolumeSpecName: "kube-api-access-7f8kp") pod "741f5e11-d2d0-405c-9826-929d9b2b072b" (UID: "741f5e11-d2d0-405c-9826-929d9b2b072b"). InnerVolumeSpecName "kube-api-access-7f8kp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:52:13 crc kubenswrapper[4861]: I1003 13:52:13.534500 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"3f0d752d-7682-4244-9682-bf78e9a9d8ec","Type":"ContainerStarted","Data":"56ec3649bb86906bff7d9af24f211c89bd377bac70080f342da5647c1e5584f2"} Oct 03 13:52:13 crc kubenswrapper[4861]: I1003 13:52:13.553243 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-vwnhz" event={"ID":"741f5e11-d2d0-405c-9826-929d9b2b072b","Type":"ContainerDied","Data":"b0c554587940b28ee6a6edf3562d17c03bed67508391a458b4b7c05cef75d2d8"} Oct 03 13:52:13 crc kubenswrapper[4861]: I1003 13:52:13.553278 4861 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b0c554587940b28ee6a6edf3562d17c03bed67508391a458b4b7c05cef75d2d8" Oct 03 13:52:13 crc kubenswrapper[4861]: I1003 13:52:13.553326 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-vwnhz" Oct 03 13:52:13 crc kubenswrapper[4861]: I1003 13:52:13.578624 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-nrf7f" event={"ID":"de3aa019-3497-44f8-a749-0f04313edd2a","Type":"ContainerDied","Data":"6ce6fdbf69057dcbc7f23b80fb2673e8291659e4d7aaecbb600b90156382f725"} Oct 03 13:52:13 crc kubenswrapper[4861]: I1003 13:52:13.578659 4861 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6ce6fdbf69057dcbc7f23b80fb2673e8291659e4d7aaecbb600b90156382f725" Oct 03 13:52:13 crc kubenswrapper[4861]: I1003 13:52:13.578710 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-nrf7f" Oct 03 13:52:13 crc kubenswrapper[4861]: I1003 13:52:13.581991 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-mvb4f" event={"ID":"b99e855f-4dbf-498a-b942-e22c86d392d2","Type":"ContainerDied","Data":"7bdc2db473c1b5df40faee25c993314c7685de54a8dedec022ccc779c0ec4083"} Oct 03 13:52:13 crc kubenswrapper[4861]: I1003 13:52:13.582029 4861 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7bdc2db473c1b5df40faee25c993314c7685de54a8dedec022ccc779c0ec4083" Oct 03 13:52:13 crc kubenswrapper[4861]: I1003 13:52:13.582091 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-mvb4f" Oct 03 13:52:13 crc kubenswrapper[4861]: I1003 13:52:13.586430 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"de8f27cb-a40f-4ab0-b709-4abad3ff72bb","Type":"ContainerStarted","Data":"c0d5e9fa254067ef9adc0c81ef686b6cedd21dc3070ad45c1107dc650290c966"} Oct 03 13:52:13 crc kubenswrapper[4861]: I1003 13:52:13.622083 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wpp9n\" (UniqueName: \"kubernetes.io/projected/b99e855f-4dbf-498a-b942-e22c86d392d2-kube-api-access-wpp9n\") pod \"b99e855f-4dbf-498a-b942-e22c86d392d2\" (UID: \"b99e855f-4dbf-498a-b942-e22c86d392d2\") " Oct 03 13:52:13 crc kubenswrapper[4861]: I1003 13:52:13.622241 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t74cx\" (UniqueName: \"kubernetes.io/projected/de3aa019-3497-44f8-a749-0f04313edd2a-kube-api-access-t74cx\") pod \"de3aa019-3497-44f8-a749-0f04313edd2a\" (UID: \"de3aa019-3497-44f8-a749-0f04313edd2a\") " Oct 03 13:52:13 crc kubenswrapper[4861]: I1003 13:52:13.622623 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7f8kp\" (UniqueName: \"kubernetes.io/projected/741f5e11-d2d0-405c-9826-929d9b2b072b-kube-api-access-7f8kp\") on node \"crc\" DevicePath \"\"" Oct 03 13:52:13 crc kubenswrapper[4861]: I1003 13:52:13.626514 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/de3aa019-3497-44f8-a749-0f04313edd2a-kube-api-access-t74cx" (OuterVolumeSpecName: "kube-api-access-t74cx") pod "de3aa019-3497-44f8-a749-0f04313edd2a" (UID: "de3aa019-3497-44f8-a749-0f04313edd2a"). InnerVolumeSpecName "kube-api-access-t74cx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:52:13 crc kubenswrapper[4861]: I1003 13:52:13.627165 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b99e855f-4dbf-498a-b942-e22c86d392d2-kube-api-access-wpp9n" (OuterVolumeSpecName: "kube-api-access-wpp9n") pod "b99e855f-4dbf-498a-b942-e22c86d392d2" (UID: "b99e855f-4dbf-498a-b942-e22c86d392d2"). InnerVolumeSpecName "kube-api-access-wpp9n". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:52:13 crc kubenswrapper[4861]: I1003 13:52:13.724115 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t74cx\" (UniqueName: \"kubernetes.io/projected/de3aa019-3497-44f8-a749-0f04313edd2a-kube-api-access-t74cx\") on node \"crc\" DevicePath \"\"" Oct 03 13:52:13 crc kubenswrapper[4861]: I1003 13:52:13.724151 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wpp9n\" (UniqueName: \"kubernetes.io/projected/b99e855f-4dbf-498a-b942-e22c86d392d2-kube-api-access-wpp9n\") on node \"crc\" DevicePath \"\"" Oct 03 13:52:14 crc kubenswrapper[4861]: I1003 13:52:14.017269 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 03 13:52:14 crc kubenswrapper[4861]: I1003 13:52:14.604214 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"451416fc-3410-4c91-b25c-1ab949a84fd5","Type":"ContainerStarted","Data":"cde792d4e1541b94d718d915409182a4b62d2cb92effd38dfc717a6370852401"} Oct 03 13:52:14 crc kubenswrapper[4861]: I1003 13:52:14.604728 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"451416fc-3410-4c91-b25c-1ab949a84fd5","Type":"ContainerStarted","Data":"36bef156e6a6c24614cae163dcadbb22af40e4e050077cd5cc81b1948cb1d5a1"} Oct 03 13:52:14 crc kubenswrapper[4861]: I1003 13:52:14.608493 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"de8f27cb-a40f-4ab0-b709-4abad3ff72bb","Type":"ContainerStarted","Data":"1f2d98c4d86847857c0fa38d48fc04818a01b03f708cdbd1102b0f2ca6fd5a36"} Oct 03 13:52:14 crc kubenswrapper[4861]: I1003 13:52:14.608741 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Oct 03 13:52:14 crc kubenswrapper[4861]: I1003 13:52:14.629205 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"a2693b41-01f0-48e9-b551-fa6c48d29531","Type":"ContainerStarted","Data":"777f1e254006bbc6a7de913525259a5ce0dce488709e9e7868fddfbe18fdabec"} Oct 03 13:52:14 crc kubenswrapper[4861]: I1003 13:52:14.631047 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"3f0d752d-7682-4244-9682-bf78e9a9d8ec","Type":"ContainerStarted","Data":"39e889cf82a19fc2774f2810e2d9532fb3806cc00e3dfaa4478c8222fa9b6692"} Oct 03 13:52:14 crc kubenswrapper[4861]: I1003 13:52:14.642805 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=4.642783493 podStartE2EDuration="4.642783493s" podCreationTimestamp="2025-10-03 13:52:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:52:14.626641314 +0000 UTC m=+1248.624626361" watchObservedRunningTime="2025-10-03 13:52:14.642783493 +0000 UTC m=+1248.640768540" Oct 03 13:52:15 crc kubenswrapper[4861]: I1003 13:52:15.659148 
4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"451416fc-3410-4c91-b25c-1ab949a84fd5","Type":"ContainerStarted","Data":"fc93d065d12323a78418c7de7e89fddc38e8c41f7852ecf8b22ac895992c6836"}
Oct 03 13:52:15 crc kubenswrapper[4861]: I1003 13:52:15.665548 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"a2693b41-01f0-48e9-b551-fa6c48d29531","Type":"ContainerStarted","Data":"4bd7bbdd206bc35105996ab2f60b6d413b3999744f3dbf6eadf7ad5a33507a80"}
Oct 03 13:52:15 crc kubenswrapper[4861]: I1003 13:52:15.665593 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"a2693b41-01f0-48e9-b551-fa6c48d29531","Type":"ContainerStarted","Data":"49ab3ffe558cf555bc7a877c53c327d79e6cc5d994a2fd175cbfd51c96acd8c0"}
Oct 03 13:52:15 crc kubenswrapper[4861]: I1003 13:52:15.671592 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"3f0d752d-7682-4244-9682-bf78e9a9d8ec","Type":"ContainerStarted","Data":"8c46dab4f304fffa7be63bb1a9b1d5e813e09972f49fa00304305d90fe84205d"}
Oct 03 13:52:15 crc kubenswrapper[4861]: I1003 13:52:15.746824 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=3.746801768 podStartE2EDuration="3.746801768s" podCreationTimestamp="2025-10-03 13:52:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:52:15.701904715 +0000 UTC m=+1249.699889762" watchObservedRunningTime="2025-10-03 13:52:15.746801768 +0000 UTC m=+1249.744786815"
Oct 03 13:52:15 crc kubenswrapper[4861]: I1003 13:52:15.802222 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=4.8022013999999995 podStartE2EDuration="4.8022014s" podCreationTimestamp="2025-10-03 13:52:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:52:15.745627967 +0000 UTC m=+1249.743613024" watchObservedRunningTime="2025-10-03 13:52:15.8022014 +0000 UTC m=+1249.800186447"
Oct 03 13:52:17 crc kubenswrapper[4861]: I1003 13:52:17.691511 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"451416fc-3410-4c91-b25c-1ab949a84fd5","Type":"ContainerStarted","Data":"5014e25b56825833654c66c9600543b8250cb8180044814e00e92a1f531846ce"}
Oct 03 13:52:17 crc kubenswrapper[4861]: I1003 13:52:17.692102 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0"
Oct 03 13:52:17 crc kubenswrapper[4861]: I1003 13:52:17.720943 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.854427254 podStartE2EDuration="7.720922392s" podCreationTimestamp="2025-10-03 13:52:10 +0000 UTC" firstStartedPulling="2025-10-03 13:52:12.124399177 +0000 UTC m=+1246.122384224" lastFinishedPulling="2025-10-03 13:52:16.990894315 +0000 UTC m=+1250.988879362" observedRunningTime="2025-10-03 13:52:17.715385965 +0000 UTC m=+1251.713371032" watchObservedRunningTime="2025-10-03 13:52:17.720922392 +0000 UTC m=+1251.718907439"
Oct 03 13:52:19 crc kubenswrapper[4861]: I1003 13:52:19.167944 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Oct 03 13:52:19 crc kubenswrapper[4861]: I1003 13:52:19.712116 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="451416fc-3410-4c91-b25c-1ab949a84fd5" containerName="ceilometer-central-agent" containerID="cri-o://36bef156e6a6c24614cae163dcadbb22af40e4e050077cd5cc81b1948cb1d5a1" gracePeriod=30
Oct 03 13:52:19 crc kubenswrapper[4861]: I1003 13:52:19.712175 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="451416fc-3410-4c91-b25c-1ab949a84fd5" containerName="sg-core" containerID="cri-o://fc93d065d12323a78418c7de7e89fddc38e8c41f7852ecf8b22ac895992c6836" gracePeriod=30
Oct 03 13:52:19 crc kubenswrapper[4861]: I1003 13:52:19.712226 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="451416fc-3410-4c91-b25c-1ab949a84fd5" containerName="proxy-httpd" containerID="cri-o://5014e25b56825833654c66c9600543b8250cb8180044814e00e92a1f531846ce" gracePeriod=30
Oct 03 13:52:19 crc kubenswrapper[4861]: I1003 13:52:19.712260 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="451416fc-3410-4c91-b25c-1ab949a84fd5" containerName="ceilometer-notification-agent" containerID="cri-o://cde792d4e1541b94d718d915409182a4b62d2cb92effd38dfc717a6370852401" gracePeriod=30
Oct 03 13:52:20 crc kubenswrapper[4861]: I1003 13:52:20.744906 4861 generic.go:334] "Generic (PLEG): container finished" podID="451416fc-3410-4c91-b25c-1ab949a84fd5" containerID="5014e25b56825833654c66c9600543b8250cb8180044814e00e92a1f531846ce" exitCode=0
Oct 03 13:52:20 crc kubenswrapper[4861]: I1003 13:52:20.745462 4861 generic.go:334] "Generic (PLEG): container finished" podID="451416fc-3410-4c91-b25c-1ab949a84fd5" containerID="fc93d065d12323a78418c7de7e89fddc38e8c41f7852ecf8b22ac895992c6836" exitCode=2
Oct 03 13:52:20 crc kubenswrapper[4861]: I1003 13:52:20.745475 4861 generic.go:334] "Generic (PLEG): container finished" podID="451416fc-3410-4c91-b25c-1ab949a84fd5" containerID="cde792d4e1541b94d718d915409182a4b62d2cb92effd38dfc717a6370852401" exitCode=0
Oct 03 13:52:20 crc kubenswrapper[4861]: I1003 13:52:20.745485 4861 generic.go:334] "Generic (PLEG): container finished" podID="451416fc-3410-4c91-b25c-1ab949a84fd5" containerID="36bef156e6a6c24614cae163dcadbb22af40e4e050077cd5cc81b1948cb1d5a1" exitCode=0
Oct 03 13:52:20 crc kubenswrapper[4861]: I1003 13:52:20.745506 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"451416fc-3410-4c91-b25c-1ab949a84fd5","Type":"ContainerDied","Data":"5014e25b56825833654c66c9600543b8250cb8180044814e00e92a1f531846ce"}
Oct 03 13:52:20 crc kubenswrapper[4861]: I1003 13:52:20.745538 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"451416fc-3410-4c91-b25c-1ab949a84fd5","Type":"ContainerDied","Data":"fc93d065d12323a78418c7de7e89fddc38e8c41f7852ecf8b22ac895992c6836"}
Oct 03 13:52:20 crc kubenswrapper[4861]: I1003 13:52:20.745550 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"451416fc-3410-4c91-b25c-1ab949a84fd5","Type":"ContainerDied","Data":"cde792d4e1541b94d718d915409182a4b62d2cb92effd38dfc717a6370852401"}
Oct 03 13:52:20 crc kubenswrapper[4861]: I1003 13:52:20.745560 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"451416fc-3410-4c91-b25c-1ab949a84fd5","Type":"ContainerDied","Data":"36bef156e6a6c24614cae163dcadbb22af40e4e050077cd5cc81b1948cb1d5a1"}
Oct 03 13:52:20 crc kubenswrapper[4861]: I1003 13:52:20.945093 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Oct 03 13:52:21 crc kubenswrapper[4861]: I1003 13:52:21.076429 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7rzn\" (UniqueName: \"kubernetes.io/projected/451416fc-3410-4c91-b25c-1ab949a84fd5-kube-api-access-x7rzn\") pod \"451416fc-3410-4c91-b25c-1ab949a84fd5\" (UID: \"451416fc-3410-4c91-b25c-1ab949a84fd5\") "
Oct 03 13:52:21 crc kubenswrapper[4861]: I1003 13:52:21.076535 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/451416fc-3410-4c91-b25c-1ab949a84fd5-run-httpd\") pod \"451416fc-3410-4c91-b25c-1ab949a84fd5\" (UID: \"451416fc-3410-4c91-b25c-1ab949a84fd5\") "
Oct 03 13:52:21 crc kubenswrapper[4861]: I1003 13:52:21.076662 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/451416fc-3410-4c91-b25c-1ab949a84fd5-combined-ca-bundle\") pod \"451416fc-3410-4c91-b25c-1ab949a84fd5\" (UID: \"451416fc-3410-4c91-b25c-1ab949a84fd5\") "
Oct 03 13:52:21 crc kubenswrapper[4861]: I1003 13:52:21.076757 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/451416fc-3410-4c91-b25c-1ab949a84fd5-sg-core-conf-yaml\") pod \"451416fc-3410-4c91-b25c-1ab949a84fd5\" (UID: \"451416fc-3410-4c91-b25c-1ab949a84fd5\") "
Oct 03 13:52:21 crc kubenswrapper[4861]: I1003 13:52:21.076785 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/451416fc-3410-4c91-b25c-1ab949a84fd5-config-data\") pod \"451416fc-3410-4c91-b25c-1ab949a84fd5\" (UID: \"451416fc-3410-4c91-b25c-1ab949a84fd5\") "
Oct 03 13:52:21 crc kubenswrapper[4861]: I1003 13:52:21.076800 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/451416fc-3410-4c91-b25c-1ab949a84fd5-log-httpd\") pod \"451416fc-3410-4c91-b25c-1ab949a84fd5\" (UID: \"451416fc-3410-4c91-b25c-1ab949a84fd5\") "
Oct 03 13:52:21 crc kubenswrapper[4861]: I1003 13:52:21.076848 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/451416fc-3410-4c91-b25c-1ab949a84fd5-scripts\") pod \"451416fc-3410-4c91-b25c-1ab949a84fd5\" (UID: \"451416fc-3410-4c91-b25c-1ab949a84fd5\") "
Oct 03 13:52:21 crc kubenswrapper[4861]: I1003 13:52:21.077655 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/451416fc-3410-4c91-b25c-1ab949a84fd5-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "451416fc-3410-4c91-b25c-1ab949a84fd5" (UID: "451416fc-3410-4c91-b25c-1ab949a84fd5"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 03 13:52:21 crc kubenswrapper[4861]: I1003 13:52:21.077792 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/451416fc-3410-4c91-b25c-1ab949a84fd5-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "451416fc-3410-4c91-b25c-1ab949a84fd5" (UID: "451416fc-3410-4c91-b25c-1ab949a84fd5"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 03 13:52:21 crc kubenswrapper[4861]: I1003 13:52:21.078001 4861 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/451416fc-3410-4c91-b25c-1ab949a84fd5-run-httpd\") on node \"crc\" DevicePath \"\""
Oct 03 13:52:21 crc kubenswrapper[4861]: I1003 13:52:21.078025 4861 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/451416fc-3410-4c91-b25c-1ab949a84fd5-log-httpd\") on node \"crc\" DevicePath \"\""
Oct 03 13:52:21 crc kubenswrapper[4861]: I1003 13:52:21.083629 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/451416fc-3410-4c91-b25c-1ab949a84fd5-kube-api-access-x7rzn" (OuterVolumeSpecName: "kube-api-access-x7rzn") pod "451416fc-3410-4c91-b25c-1ab949a84fd5" (UID: "451416fc-3410-4c91-b25c-1ab949a84fd5"). InnerVolumeSpecName "kube-api-access-x7rzn". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 13:52:21 crc kubenswrapper[4861]: I1003 13:52:21.098376 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/451416fc-3410-4c91-b25c-1ab949a84fd5-scripts" (OuterVolumeSpecName: "scripts") pod "451416fc-3410-4c91-b25c-1ab949a84fd5" (UID: "451416fc-3410-4c91-b25c-1ab949a84fd5"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 13:52:21 crc kubenswrapper[4861]: I1003 13:52:21.112394 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/451416fc-3410-4c91-b25c-1ab949a84fd5-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "451416fc-3410-4c91-b25c-1ab949a84fd5" (UID: "451416fc-3410-4c91-b25c-1ab949a84fd5"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 13:52:21 crc kubenswrapper[4861]: I1003 13:52:21.179445 4861 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/451416fc-3410-4c91-b25c-1ab949a84fd5-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\""
Oct 03 13:52:21 crc kubenswrapper[4861]: I1003 13:52:21.179500 4861 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/451416fc-3410-4c91-b25c-1ab949a84fd5-scripts\") on node \"crc\" DevicePath \"\""
Oct 03 13:52:21 crc kubenswrapper[4861]: I1003 13:52:21.179509 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7rzn\" (UniqueName: \"kubernetes.io/projected/451416fc-3410-4c91-b25c-1ab949a84fd5-kube-api-access-x7rzn\") on node \"crc\" DevicePath \"\""
Oct 03 13:52:21 crc kubenswrapper[4861]: I1003 13:52:21.212688 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/451416fc-3410-4c91-b25c-1ab949a84fd5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "451416fc-3410-4c91-b25c-1ab949a84fd5" (UID: "451416fc-3410-4c91-b25c-1ab949a84fd5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 13:52:21 crc kubenswrapper[4861]: I1003 13:52:21.216777 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/451416fc-3410-4c91-b25c-1ab949a84fd5-config-data" (OuterVolumeSpecName: "config-data") pod "451416fc-3410-4c91-b25c-1ab949a84fd5" (UID: "451416fc-3410-4c91-b25c-1ab949a84fd5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 13:52:21 crc kubenswrapper[4861]: I1003 13:52:21.281331 4861 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/451416fc-3410-4c91-b25c-1ab949a84fd5-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 03 13:52:21 crc kubenswrapper[4861]: I1003 13:52:21.281533 4861 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/451416fc-3410-4c91-b25c-1ab949a84fd5-config-data\") on node \"crc\" DevicePath \"\""
Oct 03 13:52:21 crc kubenswrapper[4861]: I1003 13:52:21.756026 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"451416fc-3410-4c91-b25c-1ab949a84fd5","Type":"ContainerDied","Data":"9e47c8042134ff47073f2b035441462240d9cfe06fa1bc4d667a22ce81e06ac5"}
Oct 03 13:52:21 crc kubenswrapper[4861]: I1003 13:52:21.756119 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Oct 03 13:52:21 crc kubenswrapper[4861]: I1003 13:52:21.757094 4861 scope.go:117] "RemoveContainer" containerID="5014e25b56825833654c66c9600543b8250cb8180044814e00e92a1f531846ce"
Oct 03 13:52:21 crc kubenswrapper[4861]: I1003 13:52:21.784038 4861 scope.go:117] "RemoveContainer" containerID="fc93d065d12323a78418c7de7e89fddc38e8c41f7852ecf8b22ac895992c6836"
Oct 03 13:52:21 crc kubenswrapper[4861]: I1003 13:52:21.805262 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Oct 03 13:52:21 crc kubenswrapper[4861]: I1003 13:52:21.820164 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"]
Oct 03 13:52:21 crc kubenswrapper[4861]: I1003 13:52:21.825399 4861 scope.go:117] "RemoveContainer" containerID="cde792d4e1541b94d718d915409182a4b62d2cb92effd38dfc717a6370852401"
Oct 03 13:52:21 crc kubenswrapper[4861]: I1003 13:52:21.848409 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"]
Oct 03 13:52:21 crc kubenswrapper[4861]: E1003 13:52:21.848839 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="451416fc-3410-4c91-b25c-1ab949a84fd5" containerName="sg-core"
Oct 03 13:52:21 crc kubenswrapper[4861]: I1003 13:52:21.848858 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="451416fc-3410-4c91-b25c-1ab949a84fd5" containerName="sg-core"
Oct 03 13:52:21 crc kubenswrapper[4861]: E1003 13:52:21.848904 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="451416fc-3410-4c91-b25c-1ab949a84fd5" containerName="ceilometer-central-agent"
Oct 03 13:52:21 crc kubenswrapper[4861]: I1003 13:52:21.848915 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="451416fc-3410-4c91-b25c-1ab949a84fd5" containerName="ceilometer-central-agent"
Oct 03 13:52:21 crc kubenswrapper[4861]: E1003 13:52:21.848951 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="451416fc-3410-4c91-b25c-1ab949a84fd5" containerName="ceilometer-notification-agent"
Oct 03 13:52:21 crc kubenswrapper[4861]: I1003 13:52:21.848958 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="451416fc-3410-4c91-b25c-1ab949a84fd5" containerName="ceilometer-notification-agent"
Oct 03 13:52:21 crc kubenswrapper[4861]: E1003 13:52:21.848977 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="de3aa019-3497-44f8-a749-0f04313edd2a" containerName="mariadb-database-create"
Oct 03 13:52:21 crc kubenswrapper[4861]: I1003 13:52:21.848984 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="de3aa019-3497-44f8-a749-0f04313edd2a" containerName="mariadb-database-create"
Oct 03 13:52:21 crc kubenswrapper[4861]: E1003 13:52:21.848997 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b99e855f-4dbf-498a-b942-e22c86d392d2" containerName="mariadb-database-create"
Oct 03 13:52:21 crc kubenswrapper[4861]: I1003 13:52:21.849005 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="b99e855f-4dbf-498a-b942-e22c86d392d2" containerName="mariadb-database-create"
Oct 03 13:52:21 crc kubenswrapper[4861]: E1003 13:52:21.849016 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="741f5e11-d2d0-405c-9826-929d9b2b072b" containerName="mariadb-database-create"
Oct 03 13:52:21 crc kubenswrapper[4861]: I1003 13:52:21.849022 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="741f5e11-d2d0-405c-9826-929d9b2b072b" containerName="mariadb-database-create"
Oct 03 13:52:21 crc kubenswrapper[4861]: E1003 13:52:21.849037 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="451416fc-3410-4c91-b25c-1ab949a84fd5" containerName="proxy-httpd"
Oct 03 13:52:21 crc kubenswrapper[4861]: I1003 13:52:21.849043 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="451416fc-3410-4c91-b25c-1ab949a84fd5" containerName="proxy-httpd"
Oct 03 13:52:21 crc kubenswrapper[4861]: I1003 13:52:21.849267 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="de3aa019-3497-44f8-a749-0f04313edd2a" containerName="mariadb-database-create"
Oct 03 13:52:21 crc kubenswrapper[4861]: I1003 13:52:21.849297 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="451416fc-3410-4c91-b25c-1ab949a84fd5" containerName="proxy-httpd"
Oct 03 13:52:21 crc kubenswrapper[4861]: I1003 13:52:21.849313 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="741f5e11-d2d0-405c-9826-929d9b2b072b" containerName="mariadb-database-create"
Oct 03 13:52:21 crc kubenswrapper[4861]: I1003 13:52:21.849326 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="451416fc-3410-4c91-b25c-1ab949a84fd5" containerName="sg-core"
Oct 03 13:52:21 crc kubenswrapper[4861]: I1003 13:52:21.849343 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="451416fc-3410-4c91-b25c-1ab949a84fd5" containerName="ceilometer-central-agent"
Oct 03 13:52:21 crc kubenswrapper[4861]: I1003 13:52:21.849356 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="b99e855f-4dbf-498a-b942-e22c86d392d2" containerName="mariadb-database-create"
Oct 03 13:52:21 crc kubenswrapper[4861]: I1003 13:52:21.849371 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="451416fc-3410-4c91-b25c-1ab949a84fd5" containerName="ceilometer-notification-agent"
Oct 03 13:52:21 crc kubenswrapper[4861]: I1003 13:52:21.851294 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Oct 03 13:52:21 crc kubenswrapper[4861]: I1003 13:52:21.856812 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts"
Oct 03 13:52:21 crc kubenswrapper[4861]: I1003 13:52:21.856813 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data"
Oct 03 13:52:21 crc kubenswrapper[4861]: I1003 13:52:21.861133 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Oct 03 13:52:21 crc kubenswrapper[4861]: I1003 13:52:21.871177 4861 scope.go:117] "RemoveContainer" containerID="36bef156e6a6c24614cae163dcadbb22af40e4e050077cd5cc81b1948cb1d5a1"
Oct 03 13:52:21 crc kubenswrapper[4861]: I1003 13:52:21.996680 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k7w46\" (UniqueName: \"kubernetes.io/projected/c80d8008-32d3-4420-9ccc-aac9d31abfdd-kube-api-access-k7w46\") pod \"ceilometer-0\" (UID: \"c80d8008-32d3-4420-9ccc-aac9d31abfdd\") " pod="openstack/ceilometer-0"
Oct 03 13:52:21 crc kubenswrapper[4861]: I1003 13:52:21.996763 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c80d8008-32d3-4420-9ccc-aac9d31abfdd-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c80d8008-32d3-4420-9ccc-aac9d31abfdd\") " pod="openstack/ceilometer-0"
Oct 03 13:52:21 crc kubenswrapper[4861]: I1003 13:52:21.996802 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c80d8008-32d3-4420-9ccc-aac9d31abfdd-config-data\") pod \"ceilometer-0\" (UID: \"c80d8008-32d3-4420-9ccc-aac9d31abfdd\") " pod="openstack/ceilometer-0"
Oct 03 13:52:21 crc kubenswrapper[4861]: I1003 13:52:21.996864 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c80d8008-32d3-4420-9ccc-aac9d31abfdd-log-httpd\") pod \"ceilometer-0\" (UID: \"c80d8008-32d3-4420-9ccc-aac9d31abfdd\") " pod="openstack/ceilometer-0"
Oct 03 13:52:21 crc kubenswrapper[4861]: I1003 13:52:21.996955 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c80d8008-32d3-4420-9ccc-aac9d31abfdd-scripts\") pod \"ceilometer-0\" (UID: \"c80d8008-32d3-4420-9ccc-aac9d31abfdd\") " pod="openstack/ceilometer-0"
Oct 03 13:52:21 crc kubenswrapper[4861]: I1003 13:52:21.996979 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c80d8008-32d3-4420-9ccc-aac9d31abfdd-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c80d8008-32d3-4420-9ccc-aac9d31abfdd\") " pod="openstack/ceilometer-0"
Oct 03 13:52:21 crc kubenswrapper[4861]: I1003 13:52:21.997012 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c80d8008-32d3-4420-9ccc-aac9d31abfdd-run-httpd\") pod \"ceilometer-0\" (UID: \"c80d8008-32d3-4420-9ccc-aac9d31abfdd\") " pod="openstack/ceilometer-0"
Oct 03 13:52:22 crc kubenswrapper[4861]: I1003 13:52:22.098843 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c80d8008-32d3-4420-9ccc-aac9d31abfdd-log-httpd\") pod \"ceilometer-0\" (UID: \"c80d8008-32d3-4420-9ccc-aac9d31abfdd\") " pod="openstack/ceilometer-0"
Oct 03 13:52:22 crc kubenswrapper[4861]: I1003 13:52:22.098930 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c80d8008-32d3-4420-9ccc-aac9d31abfdd-scripts\") pod \"ceilometer-0\" (UID: \"c80d8008-32d3-4420-9ccc-aac9d31abfdd\") " pod="openstack/ceilometer-0"
Oct 03 13:52:22 crc kubenswrapper[4861]: I1003 13:52:22.098953 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c80d8008-32d3-4420-9ccc-aac9d31abfdd-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c80d8008-32d3-4420-9ccc-aac9d31abfdd\") " pod="openstack/ceilometer-0"
Oct 03 13:52:22 crc kubenswrapper[4861]: I1003 13:52:22.098992 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c80d8008-32d3-4420-9ccc-aac9d31abfdd-run-httpd\") pod \"ceilometer-0\" (UID: \"c80d8008-32d3-4420-9ccc-aac9d31abfdd\") " pod="openstack/ceilometer-0"
Oct 03 13:52:22 crc kubenswrapper[4861]: I1003 13:52:22.099052 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k7w46\" (UniqueName: \"kubernetes.io/projected/c80d8008-32d3-4420-9ccc-aac9d31abfdd-kube-api-access-k7w46\") pod \"ceilometer-0\" (UID: \"c80d8008-32d3-4420-9ccc-aac9d31abfdd\") " pod="openstack/ceilometer-0"
Oct 03 13:52:22 crc kubenswrapper[4861]: I1003 13:52:22.099091 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c80d8008-32d3-4420-9ccc-aac9d31abfdd-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c80d8008-32d3-4420-9ccc-aac9d31abfdd\") " pod="openstack/ceilometer-0"
Oct 03 13:52:22 crc kubenswrapper[4861]: I1003 13:52:22.099119 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c80d8008-32d3-4420-9ccc-aac9d31abfdd-config-data\") pod \"ceilometer-0\" (UID: \"c80d8008-32d3-4420-9ccc-aac9d31abfdd\") " pod="openstack/ceilometer-0"
Oct 03 13:52:22 crc kubenswrapper[4861]: I1003 13:52:22.099505 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c80d8008-32d3-4420-9ccc-aac9d31abfdd-log-httpd\") pod \"ceilometer-0\" (UID: \"c80d8008-32d3-4420-9ccc-aac9d31abfdd\") " pod="openstack/ceilometer-0"
Oct 03 13:52:22 crc kubenswrapper[4861]: I1003 13:52:22.099787 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c80d8008-32d3-4420-9ccc-aac9d31abfdd-run-httpd\") pod \"ceilometer-0\" (UID: \"c80d8008-32d3-4420-9ccc-aac9d31abfdd\") " pod="openstack/ceilometer-0"
Oct 03 13:52:22 crc kubenswrapper[4861]: I1003 13:52:22.104200 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c80d8008-32d3-4420-9ccc-aac9d31abfdd-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c80d8008-32d3-4420-9ccc-aac9d31abfdd\") " pod="openstack/ceilometer-0"
Oct 03 13:52:22 crc kubenswrapper[4861]: I1003 13:52:22.105938 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c80d8008-32d3-4420-9ccc-aac9d31abfdd-scripts\") pod \"ceilometer-0\" (UID: \"c80d8008-32d3-4420-9ccc-aac9d31abfdd\") " pod="openstack/ceilometer-0"
Oct 03 13:52:22 crc kubenswrapper[4861]: I1003 13:52:22.111001 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c80d8008-32d3-4420-9ccc-aac9d31abfdd-config-data\") pod \"ceilometer-0\" (UID: \"c80d8008-32d3-4420-9ccc-aac9d31abfdd\") " pod="openstack/ceilometer-0"
Oct 03 13:52:22 crc kubenswrapper[4861]: I1003 13:52:22.111618 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c80d8008-32d3-4420-9ccc-aac9d31abfdd-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c80d8008-32d3-4420-9ccc-aac9d31abfdd\") " pod="openstack/ceilometer-0"
Oct 03 13:52:22 crc kubenswrapper[4861]: I1003 13:52:22.121487 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k7w46\" (UniqueName: \"kubernetes.io/projected/c80d8008-32d3-4420-9ccc-aac9d31abfdd-kube-api-access-k7w46\") pod \"ceilometer-0\" (UID: \"c80d8008-32d3-4420-9ccc-aac9d31abfdd\") " pod="openstack/ceilometer-0"
Oct 03 13:52:22 crc kubenswrapper[4861]: I1003 13:52:22.177868 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0"
Oct 03 13:52:22 crc kubenswrapper[4861]: I1003 13:52:22.177943 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0"
Oct 03 13:52:22 crc kubenswrapper[4861]: I1003 13:52:22.206686 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Oct 03 13:52:22 crc kubenswrapper[4861]: I1003 13:52:22.226347 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0"
Oct 03 13:52:22 crc kubenswrapper[4861]: I1003 13:52:22.234839 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0"
Oct 03 13:52:22 crc kubenswrapper[4861]: I1003 13:52:22.267407 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-6c8cb9d9fb-bt6ls"
Oct 03 13:52:22 crc kubenswrapper[4861]: I1003 13:52:22.449643 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-84cdb7b9dd-jhc2h"
Oct 03 13:52:22 crc kubenswrapper[4861]: I1003 13:52:22.694460 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="451416fc-3410-4c91-b25c-1ab949a84fd5" path="/var/lib/kubelet/pods/451416fc-3410-4c91-b25c-1ab949a84fd5/volumes"
Oct 03 13:52:22 crc kubenswrapper[4861]: I1003 13:52:22.766766 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0"
Oct 03 13:52:22 crc kubenswrapper[4861]: I1003 13:52:22.766799 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0"
Oct 03 13:52:22 crc kubenswrapper[4861]: I1003 13:52:22.804084 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Oct 03 13:52:23 crc kubenswrapper[4861]: I1003 13:52:23.021121 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Oct 03 13:52:23 crc kubenswrapper[4861]: I1003 13:52:23.243696 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0"
Oct 03 13:52:23 crc kubenswrapper[4861]: I1003 13:52:23.243742 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0"
Oct 03 13:52:23 crc kubenswrapper[4861]: I1003 13:52:23.293721 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0"
Oct 03 13:52:23 crc kubenswrapper[4861]: I1003 13:52:23.318052 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0"
Oct 03 13:52:23 crc kubenswrapper[4861]: I1003 13:52:23.777716 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c80d8008-32d3-4420-9ccc-aac9d31abfdd","Type":"ContainerStarted","Data":"5043c1e309c91b98a6430300f3fc3712ccb4b2c399d6023839794c05a748e505"}
Oct 03 13:52:23 crc kubenswrapper[4861]: I1003 13:52:23.778744 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c80d8008-32d3-4420-9ccc-aac9d31abfdd","Type":"ContainerStarted","Data":"5d1ad5c12fd5233bdacd98449e528be1bb24d3683f44b1f6f2b9a89ce84bdb3a"}
Oct 03 13:52:23 crc kubenswrapper[4861]: I1003 13:52:23.778876 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0"
Oct 03 13:52:23 crc kubenswrapper[4861]: I1003 13:52:23.778966 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0"
Oct 03 13:52:24 crc kubenswrapper[4861]: I1003 13:52:24.147313 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-67b7-account-create-fmlf7"]
Oct 03 13:52:24 crc kubenswrapper[4861]: I1003 13:52:24.148696 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-67b7-account-create-fmlf7"
Oct 03 13:52:24 crc kubenswrapper[4861]: I1003 13:52:24.151788 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret"
Oct 03 13:52:24 crc kubenswrapper[4861]: I1003 13:52:24.156298 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-67b7-account-create-fmlf7"]
Oct 03 13:52:24 crc kubenswrapper[4861]: I1003 13:52:24.255901 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-brkm6\" (UniqueName: \"kubernetes.io/projected/12d3cd9b-e5e3-4b38-a84b-af709f55101d-kube-api-access-brkm6\") pod \"nova-api-67b7-account-create-fmlf7\" (UID: \"12d3cd9b-e5e3-4b38-a84b-af709f55101d\") " pod="openstack/nova-api-67b7-account-create-fmlf7"
Oct 03 13:52:24 crc kubenswrapper[4861]: I1003 13:52:24.334322 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-9a1b-account-create-6vwmh"]
Oct 03 13:52:24 crc kubenswrapper[4861]: I1003 13:52:24.345051 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-9a1b-account-create-6vwmh"
Oct 03 13:52:24 crc kubenswrapper[4861]: I1003 13:52:24.357196 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret"
Oct 03 13:52:24 crc kubenswrapper[4861]: I1003 13:52:24.359060 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-brkm6\" (UniqueName: \"kubernetes.io/projected/12d3cd9b-e5e3-4b38-a84b-af709f55101d-kube-api-access-brkm6\") pod \"nova-api-67b7-account-create-fmlf7\" (UID: \"12d3cd9b-e5e3-4b38-a84b-af709f55101d\") " pod="openstack/nova-api-67b7-account-create-fmlf7"
Oct 03 13:52:24 crc kubenswrapper[4861]: I1003 13:52:24.363494 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-9a1b-account-create-6vwmh"]
Oct 03 13:52:24 crc kubenswrapper[4861]: I1003 13:52:24.410072 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-brkm6\" (UniqueName: \"kubernetes.io/projected/12d3cd9b-e5e3-4b38-a84b-af709f55101d-kube-api-access-brkm6\") pod \"nova-api-67b7-account-create-fmlf7\" (UID: \"12d3cd9b-e5e3-4b38-a84b-af709f55101d\") " pod="openstack/nova-api-67b7-account-create-fmlf7"
Oct 03 13:52:24 crc kubenswrapper[4861]: I1003 13:52:24.460168 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jzcpt\" (UniqueName: \"kubernetes.io/projected/14b33ce2-2360-44ed-93ac-3ccfbeeceab9-kube-api-access-jzcpt\") pod \"nova-cell0-9a1b-account-create-6vwmh\" (UID: \"14b33ce2-2360-44ed-93ac-3ccfbeeceab9\") " pod="openstack/nova-cell0-9a1b-account-create-6vwmh"
Oct 03 13:52:24 crc kubenswrapper[4861]: I1003 13:52:24.473210 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-67b7-account-create-fmlf7"
Oct 03 13:52:24 crc kubenswrapper[4861]: I1003 13:52:24.545868 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-1189-account-create-9r84n"]
Oct 03 13:52:24 crc kubenswrapper[4861]: I1003 13:52:24.548124 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-1189-account-create-9r84n"
Oct 03 13:52:24 crc kubenswrapper[4861]: I1003 13:52:24.554214 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret"
Oct 03 13:52:24 crc kubenswrapper[4861]: I1003 13:52:24.561898 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jzcpt\" (UniqueName: \"kubernetes.io/projected/14b33ce2-2360-44ed-93ac-3ccfbeeceab9-kube-api-access-jzcpt\") pod \"nova-cell0-9a1b-account-create-6vwmh\" (UID: \"14b33ce2-2360-44ed-93ac-3ccfbeeceab9\") " pod="openstack/nova-cell0-9a1b-account-create-6vwmh"
Oct 03 13:52:24 crc kubenswrapper[4861]: I1003 13:52:24.580739 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-1189-account-create-9r84n"]
Oct 03 13:52:24 crc kubenswrapper[4861]: I1003 13:52:24.589196 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0"
Oct 03 13:52:24 crc kubenswrapper[4861]: I1003 13:52:24.594312 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jzcpt\" (UniqueName: \"kubernetes.io/projected/14b33ce2-2360-44ed-93ac-3ccfbeeceab9-kube-api-access-jzcpt\") pod \"nova-cell0-9a1b-account-create-6vwmh\" (UID: \"14b33ce2-2360-44ed-93ac-3ccfbeeceab9\") " pod="openstack/nova-cell0-9a1b-account-create-6vwmh"
Oct 03 13:52:24 crc kubenswrapper[4861]: I1003 13:52:24.668889 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j46gv\" (UniqueName: \"kubernetes.io/projected/7d594b62-d7f8-4dde-b366-b5389e6315e6-kube-api-access-j46gv\") pod \"nova-cell1-1189-account-create-9r84n\" (UID: \"7d594b62-d7f8-4dde-b366-b5389e6315e6\") " pod="openstack/nova-cell1-1189-account-create-9r84n"
Oct 03 13:52:24 crc kubenswrapper[4861]: I1003 13:52:24.774507 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j46gv\" (UniqueName: \"kubernetes.io/projected/7d594b62-d7f8-4dde-b366-b5389e6315e6-kube-api-access-j46gv\") pod \"nova-cell1-1189-account-create-9r84n\" (UID: \"7d594b62-d7f8-4dde-b366-b5389e6315e6\") " pod="openstack/nova-cell1-1189-account-create-9r84n"
Oct 03 13:52:24 crc kubenswrapper[4861]: I1003 13:52:24.782287 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-9a1b-account-create-6vwmh"
Oct 03 13:52:24 crc kubenswrapper[4861]: I1003 13:52:24.809918 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j46gv\" (UniqueName: \"kubernetes.io/projected/7d594b62-d7f8-4dde-b366-b5389e6315e6-kube-api-access-j46gv\") pod \"nova-cell1-1189-account-create-9r84n\" (UID: \"7d594b62-d7f8-4dde-b366-b5389e6315e6\") " pod="openstack/nova-cell1-1189-account-create-9r84n"
Oct 03 13:52:24 crc kubenswrapper[4861]: I1003 13:52:24.977814 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-1189-account-create-9r84n"
Oct 03 13:52:25 crc kubenswrapper[4861]: I1003 13:52:25.138278 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-67b7-account-create-fmlf7"]
Oct 03 13:52:25 crc kubenswrapper[4861]: I1003 13:52:25.424516 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-9a1b-account-create-6vwmh"]
Oct 03 13:52:25 crc kubenswrapper[4861]: I1003 13:52:25.475653 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-84cdb7b9dd-jhc2h"
Oct 03 13:52:25 crc kubenswrapper[4861]: I1003 13:52:25.660995 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-1189-account-create-9r84n"]
Oct 03 13:52:25 crc kubenswrapper[4861]: I1003 13:52:25.824347 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-9a1b-account-create-6vwmh" event={"ID":"14b33ce2-2360-44ed-93ac-3ccfbeeceab9","Type":"ContainerStarted","Data":"9ac1d677f163139bc95bf4a29693e322d070516f02336e619d7e17e1e6a82399"}
Oct 03 13:52:25 crc kubenswrapper[4861]: I1003 13:52:25.838678 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c80d8008-32d3-4420-9ccc-aac9d31abfdd","Type":"ContainerStarted","Data":"0806cc2f23ecfeef7780bdf5146f4d5f6facb48d32271644611d367b8efdbb94"}
Oct 03 13:52:25 crc kubenswrapper[4861]: I1003 13:52:25.844252 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-1189-account-create-9r84n" event={"ID":"7d594b62-d7f8-4dde-b366-b5389e6315e6","Type":"ContainerStarted","Data":"a3dfddc53a3f7499b73de142e026da1b9edb3c402235646f7c0ddc4b204d4645"}
Oct 03 13:52:25 crc kubenswrapper[4861]: I1003 13:52:25.848744 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-67b7-account-create-fmlf7" event={"ID":"12d3cd9b-e5e3-4b38-a84b-af709f55101d","Type":"ContainerStarted","Data":"e61632401e17002b05cf3a77f1a0e72ebd480db2a6437cc7802256d9721ce3c3"}
Oct 03 13:52:26 crc kubenswrapper[4861]: E1003 13:52:26.354192 4861 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod12d3cd9b_e5e3_4b38_a84b_af709f55101d.slice/crio-d097e3d70c620a3a71c9105e49d81f0f01983fda9593d971db74be64f45c0bd8.scope\": RecentStats: unable to find data in memory cache]"
Oct 03 13:52:26 crc kubenswrapper[4861]: I1003 13:52:26.604445 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-6c8cb9d9fb-bt6ls"
Oct 03 13:52:26 crc kubenswrapper[4861]: I1003 13:52:26.746317 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-84cdb7b9dd-jhc2h"]
Oct 03 13:52:26 crc kubenswrapper[4861]: I1003 13:52:26.746523 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-84cdb7b9dd-jhc2h" podUID="c589e11a-4953-46ec-aeff-a83f6557421f" containerName="horizon-log" containerID="cri-o://963ef2ef3f5f426b3763350ce9604beea4e5c0db8da7c36621492d44753ff880" gracePeriod=30
Oct 03 13:52:26 crc kubenswrapper[4861]: I1003 13:52:26.746663 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-84cdb7b9dd-jhc2h" podUID="c589e11a-4953-46ec-aeff-a83f6557421f" containerName="horizon" containerID="cri-o://65774e93f7a18a88876b7eea12fb7794958ba568a544b650edc95fff9801a980" gracePeriod=30
Oct 03 13:52:26 crc kubenswrapper[4861]: I1003 13:52:26.932795 4861 generic.go:334] "Generic (PLEG): container finished" podID="14b33ce2-2360-44ed-93ac-3ccfbeeceab9" containerID="b63d596611496b1bf60a09cd0a92a97938c3fb47048907a23288fd26c7ef2972" exitCode=0
Oct 03 13:52:26 crc kubenswrapper[4861]: I1003 13:52:26.933527 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-9a1b-account-create-6vwmh" event={"ID":"14b33ce2-2360-44ed-93ac-3ccfbeeceab9","Type":"ContainerDied","Data":"b63d596611496b1bf60a09cd0a92a97938c3fb47048907a23288fd26c7ef2972"}
Oct 03 13:52:26 crc kubenswrapper[4861]: I1003 13:52:26.955932 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c80d8008-32d3-4420-9ccc-aac9d31abfdd","Type":"ContainerStarted","Data":"04d55605a5c143bca82ff68aa757f1c1f3f73be4b59777d2dbceaee1ca336467"}
Oct 03 13:52:26 crc kubenswrapper[4861]: I1003 13:52:26.969681 4861 generic.go:334] "Generic (PLEG): container finished" podID="7d594b62-d7f8-4dde-b366-b5389e6315e6" containerID="c02ec4cb324a8795a5afed12c060013264238460dae4c58492407110929bde31" exitCode=0
Oct 03 13:52:26 crc kubenswrapper[4861]: I1003 13:52:26.971003 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-1189-account-create-9r84n" event={"ID":"7d594b62-d7f8-4dde-b366-b5389e6315e6","Type":"ContainerDied","Data":"c02ec4cb324a8795a5afed12c060013264238460dae4c58492407110929bde31"}
Oct 03 13:52:26 crc kubenswrapper[4861]: I1003 13:52:26.980757 4861 generic.go:334] "Generic (PLEG): container finished" podID="12d3cd9b-e5e3-4b38-a84b-af709f55101d" containerID="d097e3d70c620a3a71c9105e49d81f0f01983fda9593d971db74be64f45c0bd8" exitCode=0
Oct 03 13:52:26 crc kubenswrapper[4861]: I1003 13:52:26.980806 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-67b7-account-create-fmlf7" event={"ID":"12d3cd9b-e5e3-4b38-a84b-af709f55101d","Type":"ContainerDied","Data":"d097e3d70c620a3a71c9105e49d81f0f01983fda9593d971db74be64f45c0bd8"}
Oct 03 13:52:28 crc kubenswrapper[4861]: I1003 13:52:28.028402 4861 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/cinder-api-0" podUID="de8f27cb-a40f-4ab0-b709-4abad3ff72bb" containerName="cinder-api" probeResult="failure" output="Get \"https://10.217.0.170:8776/healthcheck\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
Oct 03 13:52:28 crc kubenswrapper[4861]: I1003 13:52:28.051954 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0"
Oct 03 13:52:28 crc kubenswrapper[4861]: I1003 13:52:28.052063 4861 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Oct 03 13:52:28 crc kubenswrapper[4861]: I1003 13:52:28.462749 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-9a1b-account-create-6vwmh"
Oct 03 13:52:28 crc kubenswrapper[4861]: I1003 13:52:28.522035 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jzcpt\" (UniqueName: \"kubernetes.io/projected/14b33ce2-2360-44ed-93ac-3ccfbeeceab9-kube-api-access-jzcpt\") pod \"14b33ce2-2360-44ed-93ac-3ccfbeeceab9\" (UID: \"14b33ce2-2360-44ed-93ac-3ccfbeeceab9\") "
Oct 03 13:52:28 crc kubenswrapper[4861]: I1003 13:52:28.537463 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/14b33ce2-2360-44ed-93ac-3ccfbeeceab9-kube-api-access-jzcpt" (OuterVolumeSpecName: "kube-api-access-jzcpt") pod "14b33ce2-2360-44ed-93ac-3ccfbeeceab9" (UID: "14b33ce2-2360-44ed-93ac-3ccfbeeceab9"). InnerVolumeSpecName "kube-api-access-jzcpt". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 13:52:28 crc kubenswrapper[4861]: I1003 13:52:28.587339 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0"
Oct 03 13:52:28 crc kubenswrapper[4861]: I1003 13:52:28.587443 4861 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Oct 03 13:52:28 crc kubenswrapper[4861]: I1003 13:52:28.624587 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jzcpt\" (UniqueName: \"kubernetes.io/projected/14b33ce2-2360-44ed-93ac-3ccfbeeceab9-kube-api-access-jzcpt\") on node \"crc\" DevicePath \"\""
Oct 03 13:52:28 crc kubenswrapper[4861]: I1003 13:52:28.666578 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-67b7-account-create-fmlf7"
Oct 03 13:52:28 crc kubenswrapper[4861]: I1003 13:52:28.691103 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-1189-account-create-9r84n"
Oct 03 13:52:28 crc kubenswrapper[4861]: I1003 13:52:28.829321 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-brkm6\" (UniqueName: \"kubernetes.io/projected/12d3cd9b-e5e3-4b38-a84b-af709f55101d-kube-api-access-brkm6\") pod \"12d3cd9b-e5e3-4b38-a84b-af709f55101d\" (UID: \"12d3cd9b-e5e3-4b38-a84b-af709f55101d\") "
Oct 03 13:52:28 crc kubenswrapper[4861]: I1003 13:52:28.829475 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j46gv\" (UniqueName: \"kubernetes.io/projected/7d594b62-d7f8-4dde-b366-b5389e6315e6-kube-api-access-j46gv\") pod \"7d594b62-d7f8-4dde-b366-b5389e6315e6\" (UID: \"7d594b62-d7f8-4dde-b366-b5389e6315e6\") "
Oct 03 13:52:28 crc kubenswrapper[4861]: I1003 13:52:28.833991 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/12d3cd9b-e5e3-4b38-a84b-af709f55101d-kube-api-access-brkm6" (OuterVolumeSpecName: "kube-api-access-brkm6") pod "12d3cd9b-e5e3-4b38-a84b-af709f55101d" (UID: "12d3cd9b-e5e3-4b38-a84b-af709f55101d"). InnerVolumeSpecName "kube-api-access-brkm6". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 13:52:28 crc kubenswrapper[4861]: I1003 13:52:28.840422 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7d594b62-d7f8-4dde-b366-b5389e6315e6-kube-api-access-j46gv" (OuterVolumeSpecName: "kube-api-access-j46gv") pod "7d594b62-d7f8-4dde-b366-b5389e6315e6" (UID: "7d594b62-d7f8-4dde-b366-b5389e6315e6"). InnerVolumeSpecName "kube-api-access-j46gv". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 13:52:28 crc kubenswrapper[4861]: I1003 13:52:28.931602 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j46gv\" (UniqueName: \"kubernetes.io/projected/7d594b62-d7f8-4dde-b366-b5389e6315e6-kube-api-access-j46gv\") on node \"crc\" DevicePath \"\""
Oct 03 13:52:28 crc kubenswrapper[4861]: I1003 13:52:28.931645 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-brkm6\" (UniqueName: \"kubernetes.io/projected/12d3cd9b-e5e3-4b38-a84b-af709f55101d-kube-api-access-brkm6\") on node \"crc\" DevicePath \"\""
Oct 03 13:52:29 crc kubenswrapper[4861]: I1003 13:52:29.010501 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c80d8008-32d3-4420-9ccc-aac9d31abfdd","Type":"ContainerStarted","Data":"95787a0c1e580b0f0f75a8ba6c4b924d71d629882cd3f0139482cfe791d52231"}
Oct 03 13:52:29 crc kubenswrapper[4861]: I1003 13:52:29.010737 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="c80d8008-32d3-4420-9ccc-aac9d31abfdd" containerName="ceilometer-central-agent" containerID="cri-o://5043c1e309c91b98a6430300f3fc3712ccb4b2c399d6023839794c05a748e505" gracePeriod=30
Oct 03 13:52:29 crc kubenswrapper[4861]: I1003 13:52:29.011365 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0"
Oct 03 13:52:29 crc kubenswrapper[4861]: I1003 13:52:29.011712 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="c80d8008-32d3-4420-9ccc-aac9d31abfdd" containerName="proxy-httpd" containerID="cri-o://95787a0c1e580b0f0f75a8ba6c4b924d71d629882cd3f0139482cfe791d52231" gracePeriod=30
Oct 03 13:52:29 crc kubenswrapper[4861]: I1003 13:52:29.011779 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="c80d8008-32d3-4420-9ccc-aac9d31abfdd" containerName="sg-core" containerID="cri-o://04d55605a5c143bca82ff68aa757f1c1f3f73be4b59777d2dbceaee1ca336467" gracePeriod=30
Oct 03 13:52:29 crc kubenswrapper[4861]: I1003 13:52:29.011835 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="c80d8008-32d3-4420-9ccc-aac9d31abfdd" containerName="ceilometer-notification-agent" containerID="cri-o://0806cc2f23ecfeef7780bdf5146f4d5f6facb48d32271644611d367b8efdbb94" gracePeriod=30
Oct 03 13:52:29 crc kubenswrapper[4861]: I1003 13:52:29.018352 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-1189-account-create-9r84n" event={"ID":"7d594b62-d7f8-4dde-b366-b5389e6315e6","Type":"ContainerDied","Data":"a3dfddc53a3f7499b73de142e026da1b9edb3c402235646f7c0ddc4b204d4645"}
Oct 03 13:52:29 crc kubenswrapper[4861]: I1003 13:52:29.018385 4861 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a3dfddc53a3f7499b73de142e026da1b9edb3c402235646f7c0ddc4b204d4645"
Oct 03 13:52:29 crc kubenswrapper[4861]: I1003 13:52:29.018447 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-1189-account-create-9r84n"
Oct 03 13:52:29 crc kubenswrapper[4861]: I1003 13:52:29.042599 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=3.07779974 podStartE2EDuration="8.042580539s" podCreationTimestamp="2025-10-03 13:52:21 +0000 UTC" firstStartedPulling="2025-10-03 13:52:22.822837555 +0000 UTC m=+1256.820822602" lastFinishedPulling="2025-10-03 13:52:27.787618364 +0000 UTC m=+1261.785603401" observedRunningTime="2025-10-03 13:52:29.031823574 +0000 UTC m=+1263.029808621" watchObservedRunningTime="2025-10-03 13:52:29.042580539 +0000 UTC m=+1263.040565586"
Oct 03 13:52:29 crc kubenswrapper[4861]: I1003 13:52:29.047573 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-67b7-account-create-fmlf7" event={"ID":"12d3cd9b-e5e3-4b38-a84b-af709f55101d","Type":"ContainerDied","Data":"e61632401e17002b05cf3a77f1a0e72ebd480db2a6437cc7802256d9721ce3c3"}
Oct 03 13:52:29 crc kubenswrapper[4861]: I1003 13:52:29.047618 4861 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e61632401e17002b05cf3a77f1a0e72ebd480db2a6437cc7802256d9721ce3c3"
Oct 03 13:52:29 crc kubenswrapper[4861]: I1003 13:52:29.047695 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-67b7-account-create-fmlf7"
Oct 03 13:52:29 crc kubenswrapper[4861]: I1003 13:52:29.064506 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-9a1b-account-create-6vwmh" event={"ID":"14b33ce2-2360-44ed-93ac-3ccfbeeceab9","Type":"ContainerDied","Data":"9ac1d677f163139bc95bf4a29693e322d070516f02336e619d7e17e1e6a82399"}
Oct 03 13:52:29 crc kubenswrapper[4861]: I1003 13:52:29.064542 4861 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9ac1d677f163139bc95bf4a29693e322d070516f02336e619d7e17e1e6a82399"
Oct 03 13:52:29 crc kubenswrapper[4861]: I1003 13:52:29.064546 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-9a1b-account-create-6vwmh"
Oct 03 13:52:29 crc kubenswrapper[4861]: I1003 13:52:29.096013 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0"
Oct 03 13:52:29 crc kubenswrapper[4861]: I1003 13:52:29.555741 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0"
Oct 03 13:52:29 crc kubenswrapper[4861]: I1003 13:52:29.973585 4861 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-84cdb7b9dd-jhc2h" podUID="c589e11a-4953-46ec-aeff-a83f6557421f" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.143:8443/dashboard/auth/login/?next=/dashboard/\": read tcp 10.217.0.2:42866->10.217.0.143:8443: read: connection reset by peer"
Oct 03 13:52:30 crc kubenswrapper[4861]: I1003 13:52:30.076841 4861 generic.go:334] "Generic (PLEG): container finished" podID="c589e11a-4953-46ec-aeff-a83f6557421f" containerID="65774e93f7a18a88876b7eea12fb7794958ba568a544b650edc95fff9801a980" exitCode=0
Oct 03 13:52:30 crc kubenswrapper[4861]: I1003 13:52:30.076882 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-84cdb7b9dd-jhc2h" event={"ID":"c589e11a-4953-46ec-aeff-a83f6557421f","Type":"ContainerDied","Data":"65774e93f7a18a88876b7eea12fb7794958ba568a544b650edc95fff9801a980"}
Oct 03 13:52:30 crc kubenswrapper[4861]: I1003 13:52:30.077307 4861 scope.go:117] "RemoveContainer" containerID="ddeaa56e9aa6c6ba89a75f8f405df5f2eebbad9b1e8dee7d0758a5aa07447be8"
Oct 03 13:52:30 crc kubenswrapper[4861]: I1003 13:52:30.081252 4861 generic.go:334] "Generic (PLEG): container finished" podID="c80d8008-32d3-4420-9ccc-aac9d31abfdd" containerID="95787a0c1e580b0f0f75a8ba6c4b924d71d629882cd3f0139482cfe791d52231" exitCode=0
Oct 03 13:52:30 crc kubenswrapper[4861]: I1003 13:52:30.081285 4861 generic.go:334] "Generic (PLEG): container finished" podID="c80d8008-32d3-4420-9ccc-aac9d31abfdd" containerID="04d55605a5c143bca82ff68aa757f1c1f3f73be4b59777d2dbceaee1ca336467" exitCode=2
Oct 03 13:52:30 crc kubenswrapper[4861]: I1003 13:52:30.081293 4861 generic.go:334] "Generic (PLEG): container finished" podID="c80d8008-32d3-4420-9ccc-aac9d31abfdd" containerID="0806cc2f23ecfeef7780bdf5146f4d5f6facb48d32271644611d367b8efdbb94" exitCode=0
Oct 03 13:52:30 crc kubenswrapper[4861]: I1003 13:52:30.081298 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c80d8008-32d3-4420-9ccc-aac9d31abfdd","Type":"ContainerDied","Data":"95787a0c1e580b0f0f75a8ba6c4b924d71d629882cd3f0139482cfe791d52231"}
Oct 03 13:52:30 crc kubenswrapper[4861]: I1003 13:52:30.081340 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c80d8008-32d3-4420-9ccc-aac9d31abfdd","Type":"ContainerDied","Data":"04d55605a5c143bca82ff68aa757f1c1f3f73be4b59777d2dbceaee1ca336467"}
Oct 03 13:52:30 crc kubenswrapper[4861]: I1003 13:52:30.081350 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c80d8008-32d3-4420-9ccc-aac9d31abfdd","Type":"ContainerDied","Data":"0806cc2f23ecfeef7780bdf5146f4d5f6facb48d32271644611d367b8efdbb94"}
Oct 03 13:52:34 crc kubenswrapper[4861]: I1003 13:52:34.658708 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-wk4nw"]
Oct 03 13:52:34 crc kubenswrapper[4861]: E1003 13:52:34.659467 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7d594b62-d7f8-4dde-b366-b5389e6315e6" containerName="mariadb-account-create"
Oct 03 13:52:34 crc kubenswrapper[4861]: I1003 13:52:34.659480 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="7d594b62-d7f8-4dde-b366-b5389e6315e6" containerName="mariadb-account-create"
Oct 03 13:52:34 crc kubenswrapper[4861]: E1003 13:52:34.659490 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="14b33ce2-2360-44ed-93ac-3ccfbeeceab9" containerName="mariadb-account-create"
Oct 03 13:52:34 crc kubenswrapper[4861]: I1003 13:52:34.659496 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="14b33ce2-2360-44ed-93ac-3ccfbeeceab9" containerName="mariadb-account-create"
Oct 03 13:52:34 crc kubenswrapper[4861]: E1003 13:52:34.659530 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="12d3cd9b-e5e3-4b38-a84b-af709f55101d" containerName="mariadb-account-create"
Oct 03 13:52:34 crc kubenswrapper[4861]: I1003 13:52:34.659538 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="12d3cd9b-e5e3-4b38-a84b-af709f55101d" containerName="mariadb-account-create"
Oct 03 13:52:34 crc kubenswrapper[4861]: I1003 13:52:34.659696 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="12d3cd9b-e5e3-4b38-a84b-af709f55101d" containerName="mariadb-account-create"
Oct 03 13:52:34 crc kubenswrapper[4861]: I1003 13:52:34.659709 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="7d594b62-d7f8-4dde-b366-b5389e6315e6" containerName="mariadb-account-create"
Oct 03 13:52:34 crc kubenswrapper[4861]: I1003 13:52:34.659728 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="14b33ce2-2360-44ed-93ac-3ccfbeeceab9" containerName="mariadb-account-create"
Oct 03 13:52:34 crc kubenswrapper[4861]: I1003 13:52:34.666119 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-wk4nw"
Oct 03 13:52:34 crc kubenswrapper[4861]: I1003 13:52:34.670161 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts"
Oct 03 13:52:34 crc kubenswrapper[4861]: I1003 13:52:34.683598 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-7sksh"
Oct 03 13:52:34 crc kubenswrapper[4861]: I1003 13:52:34.684199 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data"
Oct 03 13:52:34 crc kubenswrapper[4861]: I1003 13:52:34.723138 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-wk4nw"]
Oct 03 13:52:34 crc kubenswrapper[4861]: I1003 13:52:34.764393 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d5xhk\" (UniqueName: \"kubernetes.io/projected/a88488b7-b658-4fc0-8d27-7f69eb12d4c4-kube-api-access-d5xhk\") pod \"nova-cell0-conductor-db-sync-wk4nw\" (UID: \"a88488b7-b658-4fc0-8d27-7f69eb12d4c4\") " pod="openstack/nova-cell0-conductor-db-sync-wk4nw"
Oct 03 13:52:34 crc kubenswrapper[4861]: I1003 13:52:34.764481 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a88488b7-b658-4fc0-8d27-7f69eb12d4c4-scripts\") pod \"nova-cell0-conductor-db-sync-wk4nw\" (UID: \"a88488b7-b658-4fc0-8d27-7f69eb12d4c4\") " pod="openstack/nova-cell0-conductor-db-sync-wk4nw"
Oct 03 13:52:34 crc kubenswrapper[4861]: I1003 13:52:34.764529 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a88488b7-b658-4fc0-8d27-7f69eb12d4c4-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-wk4nw\" (UID: \"a88488b7-b658-4fc0-8d27-7f69eb12d4c4\") " pod="openstack/nova-cell0-conductor-db-sync-wk4nw"
Oct 03 13:52:34 crc kubenswrapper[4861]: I1003 13:52:34.764701 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a88488b7-b658-4fc0-8d27-7f69eb12d4c4-config-data\") pod \"nova-cell0-conductor-db-sync-wk4nw\" (UID: \"a88488b7-b658-4fc0-8d27-7f69eb12d4c4\") " pod="openstack/nova-cell0-conductor-db-sync-wk4nw"
Oct 03 13:52:34 crc kubenswrapper[4861]: I1003 13:52:34.866112 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a88488b7-b658-4fc0-8d27-7f69eb12d4c4-scripts\") pod \"nova-cell0-conductor-db-sync-wk4nw\" (UID: \"a88488b7-b658-4fc0-8d27-7f69eb12d4c4\") " pod="openstack/nova-cell0-conductor-db-sync-wk4nw"
Oct 03 13:52:34 crc kubenswrapper[4861]: I1003 13:52:34.866154 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a88488b7-b658-4fc0-8d27-7f69eb12d4c4-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-wk4nw\" (UID: \"a88488b7-b658-4fc0-8d27-7f69eb12d4c4\") " pod="openstack/nova-cell0-conductor-db-sync-wk4nw"
Oct 03 13:52:34 crc kubenswrapper[4861]: I1003 13:52:34.866319 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a88488b7-b658-4fc0-8d27-7f69eb12d4c4-config-data\") pod \"nova-cell0-conductor-db-sync-wk4nw\" (UID: \"a88488b7-b658-4fc0-8d27-7f69eb12d4c4\") " pod="openstack/nova-cell0-conductor-db-sync-wk4nw"
Oct 03 13:52:34 crc kubenswrapper[4861]: I1003 13:52:34.866353 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d5xhk\" (UniqueName: \"kubernetes.io/projected/a88488b7-b658-4fc0-8d27-7f69eb12d4c4-kube-api-access-d5xhk\") pod \"nova-cell0-conductor-db-sync-wk4nw\" (UID: \"a88488b7-b658-4fc0-8d27-7f69eb12d4c4\") " pod="openstack/nova-cell0-conductor-db-sync-wk4nw"
Oct 03 13:52:34 crc kubenswrapper[4861]: I1003 13:52:34.872155 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a88488b7-b658-4fc0-8d27-7f69eb12d4c4-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-wk4nw\" (UID: \"a88488b7-b658-4fc0-8d27-7f69eb12d4c4\") " pod="openstack/nova-cell0-conductor-db-sync-wk4nw"
Oct 03 13:52:34 crc kubenswrapper[4861]: I1003 13:52:34.872362 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a88488b7-b658-4fc0-8d27-7f69eb12d4c4-config-data\") pod \"nova-cell0-conductor-db-sync-wk4nw\" (UID: \"a88488b7-b658-4fc0-8d27-7f69eb12d4c4\") " pod="openstack/nova-cell0-conductor-db-sync-wk4nw"
Oct 03 13:52:34 crc kubenswrapper[4861]: I1003 13:52:34.878804 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a88488b7-b658-4fc0-8d27-7f69eb12d4c4-scripts\") pod \"nova-cell0-conductor-db-sync-wk4nw\" (UID: \"a88488b7-b658-4fc0-8d27-7f69eb12d4c4\") " pod="openstack/nova-cell0-conductor-db-sync-wk4nw"
Oct 03 13:52:34 crc kubenswrapper[4861]: I1003 13:52:34.888958 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d5xhk\" (UniqueName: \"kubernetes.io/projected/a88488b7-b658-4fc0-8d27-7f69eb12d4c4-kube-api-access-d5xhk\") pod \"nova-cell0-conductor-db-sync-wk4nw\" (UID: \"a88488b7-b658-4fc0-8d27-7f69eb12d4c4\") " pod="openstack/nova-cell0-conductor-db-sync-wk4nw"
Oct 03 13:52:35 crc kubenswrapper[4861]: I1003 13:52:35.036500 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-wk4nw"
Oct 03 13:52:35 crc kubenswrapper[4861]: I1003 13:52:35.609939 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-wk4nw"]
Oct 03 13:52:36 crc kubenswrapper[4861]: I1003 13:52:36.167622 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-wk4nw" event={"ID":"a88488b7-b658-4fc0-8d27-7f69eb12d4c4","Type":"ContainerStarted","Data":"a5e1b97ef93c095862a9aca7cf0b65962e168e77f476fd24689ccd743eb82f8b"}
Oct 03 13:52:38 crc kubenswrapper[4861]: I1003 13:52:38.020387 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Oct 03 13:52:38 crc kubenswrapper[4861]: I1003 13:52:38.118040 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c80d8008-32d3-4420-9ccc-aac9d31abfdd-run-httpd\") pod \"c80d8008-32d3-4420-9ccc-aac9d31abfdd\" (UID: \"c80d8008-32d3-4420-9ccc-aac9d31abfdd\") "
Oct 03 13:52:38 crc kubenswrapper[4861]: I1003 13:52:38.118118 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c80d8008-32d3-4420-9ccc-aac9d31abfdd-sg-core-conf-yaml\") pod \"c80d8008-32d3-4420-9ccc-aac9d31abfdd\" (UID: \"c80d8008-32d3-4420-9ccc-aac9d31abfdd\") "
Oct 03 13:52:38 crc kubenswrapper[4861]: I1003 13:52:38.118271 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c80d8008-32d3-4420-9ccc-aac9d31abfdd-log-httpd\") pod \"c80d8008-32d3-4420-9ccc-aac9d31abfdd\" (UID: \"c80d8008-32d3-4420-9ccc-aac9d31abfdd\") "
Oct 03 13:52:38 crc kubenswrapper[4861]: I1003 13:52:38.118294 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c80d8008-32d3-4420-9ccc-aac9d31abfdd-scripts\") pod \"c80d8008-32d3-4420-9ccc-aac9d31abfdd\" (UID: \"c80d8008-32d3-4420-9ccc-aac9d31abfdd\") "
Oct 03 13:52:38 crc kubenswrapper[4861]: I1003 13:52:38.118337 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c80d8008-32d3-4420-9ccc-aac9d31abfdd-config-data\") pod \"c80d8008-32d3-4420-9ccc-aac9d31abfdd\" (UID: \"c80d8008-32d3-4420-9ccc-aac9d31abfdd\") "
Oct 03 13:52:38 crc kubenswrapper[4861]: I1003 13:52:38.118362 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c80d8008-32d3-4420-9ccc-aac9d31abfdd-combined-ca-bundle\") pod \"c80d8008-32d3-4420-9ccc-aac9d31abfdd\" (UID: \"c80d8008-32d3-4420-9ccc-aac9d31abfdd\") "
Oct 03 13:52:38 crc kubenswrapper[4861]: I1003 13:52:38.118459 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k7w46\" (UniqueName: \"kubernetes.io/projected/c80d8008-32d3-4420-9ccc-aac9d31abfdd-kube-api-access-k7w46\") pod \"c80d8008-32d3-4420-9ccc-aac9d31abfdd\" (UID: \"c80d8008-32d3-4420-9ccc-aac9d31abfdd\") "
Oct 03 13:52:38 crc kubenswrapper[4861]: I1003 13:52:38.118723 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c80d8008-32d3-4420-9ccc-aac9d31abfdd-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "c80d8008-32d3-4420-9ccc-aac9d31abfdd" (UID: "c80d8008-32d3-4420-9ccc-aac9d31abfdd"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 03 13:52:38 crc kubenswrapper[4861]: I1003 13:52:38.118984 4861 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c80d8008-32d3-4420-9ccc-aac9d31abfdd-run-httpd\") on node \"crc\" DevicePath \"\""
Oct 03 13:52:38 crc kubenswrapper[4861]: I1003 13:52:38.119473 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c80d8008-32d3-4420-9ccc-aac9d31abfdd-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "c80d8008-32d3-4420-9ccc-aac9d31abfdd" (UID: "c80d8008-32d3-4420-9ccc-aac9d31abfdd"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 03 13:52:38 crc kubenswrapper[4861]: I1003 13:52:38.128836 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c80d8008-32d3-4420-9ccc-aac9d31abfdd-kube-api-access-k7w46" (OuterVolumeSpecName: "kube-api-access-k7w46") pod "c80d8008-32d3-4420-9ccc-aac9d31abfdd" (UID: "c80d8008-32d3-4420-9ccc-aac9d31abfdd"). InnerVolumeSpecName "kube-api-access-k7w46". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 13:52:38 crc kubenswrapper[4861]: I1003 13:52:38.129264 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c80d8008-32d3-4420-9ccc-aac9d31abfdd-scripts" (OuterVolumeSpecName: "scripts") pod "c80d8008-32d3-4420-9ccc-aac9d31abfdd" (UID: "c80d8008-32d3-4420-9ccc-aac9d31abfdd"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 13:52:38 crc kubenswrapper[4861]: I1003 13:52:38.165013 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c80d8008-32d3-4420-9ccc-aac9d31abfdd-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "c80d8008-32d3-4420-9ccc-aac9d31abfdd" (UID: "c80d8008-32d3-4420-9ccc-aac9d31abfdd"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 13:52:38 crc kubenswrapper[4861]: I1003 13:52:38.193876 4861 generic.go:334] "Generic (PLEG): container finished" podID="c80d8008-32d3-4420-9ccc-aac9d31abfdd" containerID="5043c1e309c91b98a6430300f3fc3712ccb4b2c399d6023839794c05a748e505" exitCode=0
Oct 03 13:52:38 crc kubenswrapper[4861]: I1003 13:52:38.193935 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c80d8008-32d3-4420-9ccc-aac9d31abfdd","Type":"ContainerDied","Data":"5043c1e309c91b98a6430300f3fc3712ccb4b2c399d6023839794c05a748e505"}
Oct 03 13:52:38 crc kubenswrapper[4861]: I1003 13:52:38.193967 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c80d8008-32d3-4420-9ccc-aac9d31abfdd","Type":"ContainerDied","Data":"5d1ad5c12fd5233bdacd98449e528be1bb24d3683f44b1f6f2b9a89ce84bdb3a"}
Oct 03 13:52:38 crc kubenswrapper[4861]: I1003 13:52:38.193990 4861 scope.go:117] "RemoveContainer" containerID="95787a0c1e580b0f0f75a8ba6c4b924d71d629882cd3f0139482cfe791d52231"
Oct 03 13:52:38 crc kubenswrapper[4861]: I1003 13:52:38.194155 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Oct 03 13:52:38 crc kubenswrapper[4861]: I1003 13:52:38.216726 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c80d8008-32d3-4420-9ccc-aac9d31abfdd-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c80d8008-32d3-4420-9ccc-aac9d31abfdd" (UID: "c80d8008-32d3-4420-9ccc-aac9d31abfdd"). InnerVolumeSpecName "combined-ca-bundle".
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:52:38 crc kubenswrapper[4861]: I1003 13:52:38.222333 4861 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c80d8008-32d3-4420-9ccc-aac9d31abfdd-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 03 13:52:38 crc kubenswrapper[4861]: I1003 13:52:38.222364 4861 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c80d8008-32d3-4420-9ccc-aac9d31abfdd-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 13:52:38 crc kubenswrapper[4861]: I1003 13:52:38.222373 4861 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c80d8008-32d3-4420-9ccc-aac9d31abfdd-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 03 13:52:38 crc kubenswrapper[4861]: I1003 13:52:38.222382 4861 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c80d8008-32d3-4420-9ccc-aac9d31abfdd-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 13:52:38 crc kubenswrapper[4861]: I1003 13:52:38.222391 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k7w46\" (UniqueName: \"kubernetes.io/projected/c80d8008-32d3-4420-9ccc-aac9d31abfdd-kube-api-access-k7w46\") on node \"crc\" DevicePath \"\"" Oct 03 13:52:38 crc kubenswrapper[4861]: I1003 13:52:38.256824 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c80d8008-32d3-4420-9ccc-aac9d31abfdd-config-data" (OuterVolumeSpecName: "config-data") pod "c80d8008-32d3-4420-9ccc-aac9d31abfdd" (UID: "c80d8008-32d3-4420-9ccc-aac9d31abfdd"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:52:38 crc kubenswrapper[4861]: I1003 13:52:38.268284 4861 scope.go:117] "RemoveContainer" containerID="04d55605a5c143bca82ff68aa757f1c1f3f73be4b59777d2dbceaee1ca336467" Oct 03 13:52:38 crc kubenswrapper[4861]: I1003 13:52:38.295003 4861 scope.go:117] "RemoveContainer" containerID="0806cc2f23ecfeef7780bdf5146f4d5f6facb48d32271644611d367b8efdbb94" Oct 03 13:52:38 crc kubenswrapper[4861]: I1003 13:52:38.313806 4861 scope.go:117] "RemoveContainer" containerID="5043c1e309c91b98a6430300f3fc3712ccb4b2c399d6023839794c05a748e505" Oct 03 13:52:38 crc kubenswrapper[4861]: I1003 13:52:38.323815 4861 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c80d8008-32d3-4420-9ccc-aac9d31abfdd-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 13:52:38 crc kubenswrapper[4861]: I1003 13:52:38.334523 4861 scope.go:117] "RemoveContainer" containerID="95787a0c1e580b0f0f75a8ba6c4b924d71d629882cd3f0139482cfe791d52231" Oct 03 13:52:38 crc kubenswrapper[4861]: E1003 13:52:38.335161 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"95787a0c1e580b0f0f75a8ba6c4b924d71d629882cd3f0139482cfe791d52231\": container with ID starting with 95787a0c1e580b0f0f75a8ba6c4b924d71d629882cd3f0139482cfe791d52231 not found: ID does not exist" containerID="95787a0c1e580b0f0f75a8ba6c4b924d71d629882cd3f0139482cfe791d52231" Oct 03 13:52:38 crc kubenswrapper[4861]: I1003 13:52:38.335219 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"95787a0c1e580b0f0f75a8ba6c4b924d71d629882cd3f0139482cfe791d52231"} err="failed to get container status 
\"95787a0c1e580b0f0f75a8ba6c4b924d71d629882cd3f0139482cfe791d52231\": rpc error: code = NotFound desc = could not find container \"95787a0c1e580b0f0f75a8ba6c4b924d71d629882cd3f0139482cfe791d52231\": container with ID starting with 95787a0c1e580b0f0f75a8ba6c4b924d71d629882cd3f0139482cfe791d52231 not found: ID does not exist" Oct 03 13:52:38 crc kubenswrapper[4861]: I1003 13:52:38.335272 4861 scope.go:117] "RemoveContainer" containerID="04d55605a5c143bca82ff68aa757f1c1f3f73be4b59777d2dbceaee1ca336467" Oct 03 13:52:38 crc kubenswrapper[4861]: E1003 13:52:38.336480 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"04d55605a5c143bca82ff68aa757f1c1f3f73be4b59777d2dbceaee1ca336467\": container with ID starting with 04d55605a5c143bca82ff68aa757f1c1f3f73be4b59777d2dbceaee1ca336467 not found: ID does not exist" containerID="04d55605a5c143bca82ff68aa757f1c1f3f73be4b59777d2dbceaee1ca336467" Oct 03 13:52:38 crc kubenswrapper[4861]: I1003 13:52:38.336521 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"04d55605a5c143bca82ff68aa757f1c1f3f73be4b59777d2dbceaee1ca336467"} err="failed to get container status \"04d55605a5c143bca82ff68aa757f1c1f3f73be4b59777d2dbceaee1ca336467\": rpc error: code = NotFound desc = could not find container \"04d55605a5c143bca82ff68aa757f1c1f3f73be4b59777d2dbceaee1ca336467\": container with ID starting with 04d55605a5c143bca82ff68aa757f1c1f3f73be4b59777d2dbceaee1ca336467 not found: ID does not exist" Oct 03 13:52:38 crc kubenswrapper[4861]: I1003 13:52:38.336549 4861 scope.go:117] "RemoveContainer" containerID="0806cc2f23ecfeef7780bdf5146f4d5f6facb48d32271644611d367b8efdbb94" Oct 03 13:52:38 crc kubenswrapper[4861]: E1003 13:52:38.337065 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0806cc2f23ecfeef7780bdf5146f4d5f6facb48d32271644611d367b8efdbb94\": container with ID starting with 0806cc2f23ecfeef7780bdf5146f4d5f6facb48d32271644611d367b8efdbb94 not found: ID does not exist" containerID="0806cc2f23ecfeef7780bdf5146f4d5f6facb48d32271644611d367b8efdbb94" Oct 03 13:52:38 crc kubenswrapper[4861]: I1003 13:52:38.337093 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0806cc2f23ecfeef7780bdf5146f4d5f6facb48d32271644611d367b8efdbb94"} err="failed to get container status \"0806cc2f23ecfeef7780bdf5146f4d5f6facb48d32271644611d367b8efdbb94\": rpc error: code = NotFound desc = could not find container \"0806cc2f23ecfeef7780bdf5146f4d5f6facb48d32271644611d367b8efdbb94\": container with ID starting with 0806cc2f23ecfeef7780bdf5146f4d5f6facb48d32271644611d367b8efdbb94 not found: ID does not exist" Oct 03 13:52:38 crc kubenswrapper[4861]: I1003 13:52:38.337109 4861 scope.go:117] "RemoveContainer" containerID="5043c1e309c91b98a6430300f3fc3712ccb4b2c399d6023839794c05a748e505" Oct 03 13:52:38 crc kubenswrapper[4861]: E1003 13:52:38.338063 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5043c1e309c91b98a6430300f3fc3712ccb4b2c399d6023839794c05a748e505\": container with ID starting with 5043c1e309c91b98a6430300f3fc3712ccb4b2c399d6023839794c05a748e505 not found: ID does not exist" containerID="5043c1e309c91b98a6430300f3fc3712ccb4b2c399d6023839794c05a748e505" Oct 03 13:52:38 crc kubenswrapper[4861]: I1003 13:52:38.338091 4861 pod_container_deletor.go:53] 
"DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5043c1e309c91b98a6430300f3fc3712ccb4b2c399d6023839794c05a748e505"} err="failed to get container status \"5043c1e309c91b98a6430300f3fc3712ccb4b2c399d6023839794c05a748e505\": rpc error: code = NotFound desc = could not find container \"5043c1e309c91b98a6430300f3fc3712ccb4b2c399d6023839794c05a748e505\": container with ID starting with 5043c1e309c91b98a6430300f3fc3712ccb4b2c399d6023839794c05a748e505 not found: ID does not exist" Oct 03 13:52:38 crc kubenswrapper[4861]: I1003 13:52:38.541118 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 03 13:52:38 crc kubenswrapper[4861]: I1003 13:52:38.565800 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 03 13:52:38 crc kubenswrapper[4861]: I1003 13:52:38.577169 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 03 13:52:38 crc kubenswrapper[4861]: E1003 13:52:38.577614 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c80d8008-32d3-4420-9ccc-aac9d31abfdd" containerName="ceilometer-central-agent" Oct 03 13:52:38 crc kubenswrapper[4861]: I1003 13:52:38.577639 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="c80d8008-32d3-4420-9ccc-aac9d31abfdd" containerName="ceilometer-central-agent" Oct 03 13:52:38 crc kubenswrapper[4861]: E1003 13:52:38.577671 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c80d8008-32d3-4420-9ccc-aac9d31abfdd" containerName="proxy-httpd" Oct 03 13:52:38 crc kubenswrapper[4861]: I1003 13:52:38.577699 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="c80d8008-32d3-4420-9ccc-aac9d31abfdd" containerName="proxy-httpd" Oct 03 13:52:38 crc kubenswrapper[4861]: E1003 13:52:38.577712 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c80d8008-32d3-4420-9ccc-aac9d31abfdd" containerName="ceilometer-notification-agent" Oct 03 13:52:38 crc kubenswrapper[4861]: I1003 13:52:38.577719 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="c80d8008-32d3-4420-9ccc-aac9d31abfdd" containerName="ceilometer-notification-agent" Oct 03 13:52:38 crc kubenswrapper[4861]: E1003 13:52:38.577729 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c80d8008-32d3-4420-9ccc-aac9d31abfdd" containerName="sg-core" Oct 03 13:52:38 crc kubenswrapper[4861]: I1003 13:52:38.577735 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="c80d8008-32d3-4420-9ccc-aac9d31abfdd" containerName="sg-core" Oct 03 13:52:38 crc kubenswrapper[4861]: I1003 13:52:38.577921 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="c80d8008-32d3-4420-9ccc-aac9d31abfdd" containerName="proxy-httpd" Oct 03 13:52:38 crc kubenswrapper[4861]: I1003 13:52:38.577931 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="c80d8008-32d3-4420-9ccc-aac9d31abfdd" containerName="ceilometer-notification-agent" Oct 03 13:52:38 crc kubenswrapper[4861]: I1003 13:52:38.577948 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="c80d8008-32d3-4420-9ccc-aac9d31abfdd" containerName="ceilometer-central-agent" Oct 03 13:52:38 crc kubenswrapper[4861]: I1003 13:52:38.577957 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="c80d8008-32d3-4420-9ccc-aac9d31abfdd" containerName="sg-core" Oct 03 13:52:38 crc kubenswrapper[4861]: I1003 13:52:38.580017 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 03 13:52:38 crc kubenswrapper[4861]: I1003 13:52:38.587044 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 03 13:52:38 crc kubenswrapper[4861]: I1003 13:52:38.588950 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 03 13:52:38 crc kubenswrapper[4861]: I1003 13:52:38.589602 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 03 13:52:38 crc kubenswrapper[4861]: I1003 13:52:38.595522 4861 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-84cdb7b9dd-jhc2h" podUID="c589e11a-4953-46ec-aeff-a83f6557421f" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.143:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.143:8443: connect: connection refused" Oct 03 13:52:38 crc kubenswrapper[4861]: I1003 13:52:38.694453 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c80d8008-32d3-4420-9ccc-aac9d31abfdd" path="/var/lib/kubelet/pods/c80d8008-32d3-4420-9ccc-aac9d31abfdd/volumes" Oct 03 13:52:38 crc kubenswrapper[4861]: I1003 13:52:38.730964 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/683e1388-79ac-4d9c-8efb-60171477f47d-config-data\") pod \"ceilometer-0\" (UID: \"683e1388-79ac-4d9c-8efb-60171477f47d\") " pod="openstack/ceilometer-0" Oct 03 13:52:38 crc kubenswrapper[4861]: I1003 13:52:38.731008 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/683e1388-79ac-4d9c-8efb-60171477f47d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"683e1388-79ac-4d9c-8efb-60171477f47d\") " pod="openstack/ceilometer-0" Oct 03 13:52:38 crc kubenswrapper[4861]: I1003 13:52:38.731031 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/683e1388-79ac-4d9c-8efb-60171477f47d-run-httpd\") pod \"ceilometer-0\" (UID: \"683e1388-79ac-4d9c-8efb-60171477f47d\") " pod="openstack/ceilometer-0" Oct 03 13:52:38 crc kubenswrapper[4861]: I1003 13:52:38.731059 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/683e1388-79ac-4d9c-8efb-60171477f47d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"683e1388-79ac-4d9c-8efb-60171477f47d\") " pod="openstack/ceilometer-0" Oct 03 13:52:38 crc kubenswrapper[4861]: I1003 13:52:38.731125 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/683e1388-79ac-4d9c-8efb-60171477f47d-log-httpd\") pod \"ceilometer-0\" (UID: \"683e1388-79ac-4d9c-8efb-60171477f47d\") " pod="openstack/ceilometer-0" Oct 03 13:52:38 crc kubenswrapper[4861]: I1003 13:52:38.731143 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wx5pk\" (UniqueName: \"kubernetes.io/projected/683e1388-79ac-4d9c-8efb-60171477f47d-kube-api-access-wx5pk\") pod \"ceilometer-0\" (UID: \"683e1388-79ac-4d9c-8efb-60171477f47d\") " pod="openstack/ceilometer-0" Oct 03 13:52:38 crc kubenswrapper[4861]: I1003 13:52:38.731177 4861 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/683e1388-79ac-4d9c-8efb-60171477f47d-scripts\") pod \"ceilometer-0\" (UID: \"683e1388-79ac-4d9c-8efb-60171477f47d\") " pod="openstack/ceilometer-0" Oct 03 13:52:38 crc kubenswrapper[4861]: I1003 13:52:38.831934 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/683e1388-79ac-4d9c-8efb-60171477f47d-config-data\") pod \"ceilometer-0\" (UID: \"683e1388-79ac-4d9c-8efb-60171477f47d\") " pod="openstack/ceilometer-0" Oct 03 13:52:38 crc kubenswrapper[4861]: I1003 13:52:38.831976 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/683e1388-79ac-4d9c-8efb-60171477f47d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"683e1388-79ac-4d9c-8efb-60171477f47d\") " pod="openstack/ceilometer-0" Oct 03 13:52:38 crc kubenswrapper[4861]: I1003 13:52:38.832016 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/683e1388-79ac-4d9c-8efb-60171477f47d-run-httpd\") pod \"ceilometer-0\" (UID: \"683e1388-79ac-4d9c-8efb-60171477f47d\") " pod="openstack/ceilometer-0" Oct 03 13:52:38 crc kubenswrapper[4861]: I1003 13:52:38.832044 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/683e1388-79ac-4d9c-8efb-60171477f47d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"683e1388-79ac-4d9c-8efb-60171477f47d\") " pod="openstack/ceilometer-0" Oct 03 13:52:38 crc kubenswrapper[4861]: I1003 13:52:38.832094 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/683e1388-79ac-4d9c-8efb-60171477f47d-log-httpd\") pod \"ceilometer-0\" (UID: \"683e1388-79ac-4d9c-8efb-60171477f47d\") " pod="openstack/ceilometer-0" Oct 03 13:52:38 crc kubenswrapper[4861]: I1003 13:52:38.832119 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wx5pk\" (UniqueName: \"kubernetes.io/projected/683e1388-79ac-4d9c-8efb-60171477f47d-kube-api-access-wx5pk\") pod \"ceilometer-0\" (UID: \"683e1388-79ac-4d9c-8efb-60171477f47d\") " pod="openstack/ceilometer-0" Oct 03 13:52:38 crc kubenswrapper[4861]: I1003 13:52:38.832168 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/683e1388-79ac-4d9c-8efb-60171477f47d-scripts\") pod \"ceilometer-0\" (UID: \"683e1388-79ac-4d9c-8efb-60171477f47d\") " pod="openstack/ceilometer-0" Oct 03 13:52:38 crc kubenswrapper[4861]: I1003 13:52:38.832695 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/683e1388-79ac-4d9c-8efb-60171477f47d-run-httpd\") pod \"ceilometer-0\" (UID: \"683e1388-79ac-4d9c-8efb-60171477f47d\") " pod="openstack/ceilometer-0" Oct 03 13:52:38 crc kubenswrapper[4861]: I1003 13:52:38.832750 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/683e1388-79ac-4d9c-8efb-60171477f47d-log-httpd\") pod \"ceilometer-0\" (UID: \"683e1388-79ac-4d9c-8efb-60171477f47d\") " pod="openstack/ceilometer-0" Oct 03 13:52:38 crc kubenswrapper[4861]: I1003 13:52:38.836618 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/683e1388-79ac-4d9c-8efb-60171477f47d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"683e1388-79ac-4d9c-8efb-60171477f47d\") " pod="openstack/ceilometer-0" Oct 03 13:52:38 crc kubenswrapper[4861]: I1003 13:52:38.837002 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/683e1388-79ac-4d9c-8efb-60171477f47d-config-data\") pod \"ceilometer-0\" (UID: \"683e1388-79ac-4d9c-8efb-60171477f47d\") " pod="openstack/ceilometer-0" Oct 03 13:52:38 crc kubenswrapper[4861]: I1003 13:52:38.837438 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/683e1388-79ac-4d9c-8efb-60171477f47d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"683e1388-79ac-4d9c-8efb-60171477f47d\") " pod="openstack/ceilometer-0" Oct 03 13:52:38 crc kubenswrapper[4861]: I1003 13:52:38.850157 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/683e1388-79ac-4d9c-8efb-60171477f47d-scripts\") pod \"ceilometer-0\" (UID: \"683e1388-79ac-4d9c-8efb-60171477f47d\") " pod="openstack/ceilometer-0" Oct 03 13:52:38 crc kubenswrapper[4861]: I1003 13:52:38.855043 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wx5pk\" (UniqueName: \"kubernetes.io/projected/683e1388-79ac-4d9c-8efb-60171477f47d-kube-api-access-wx5pk\") pod \"ceilometer-0\" (UID: \"683e1388-79ac-4d9c-8efb-60171477f47d\") " pod="openstack/ceilometer-0" Oct 03 13:52:38 crc kubenswrapper[4861]: I1003 13:52:38.901257 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 03 13:52:41 crc kubenswrapper[4861]: I1003 13:52:41.875416 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 03 13:52:45 crc kubenswrapper[4861]: I1003 13:52:45.207488 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 03 13:52:45 crc kubenswrapper[4861]: W1003 13:52:45.223312 4861 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod683e1388_79ac_4d9c_8efb_60171477f47d.slice/crio-1957850d52ee2cc41f88884b012ea7eec36898a3f029fdaf7bcf1bfd90d2161e WatchSource:0}: Error finding container 1957850d52ee2cc41f88884b012ea7eec36898a3f029fdaf7bcf1bfd90d2161e: Status 404 returned error can't find the container with id 1957850d52ee2cc41f88884b012ea7eec36898a3f029fdaf7bcf1bfd90d2161e Oct 03 13:52:45 crc kubenswrapper[4861]: I1003 13:52:45.277937 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"683e1388-79ac-4d9c-8efb-60171477f47d","Type":"ContainerStarted","Data":"1957850d52ee2cc41f88884b012ea7eec36898a3f029fdaf7bcf1bfd90d2161e"} Oct 03 13:52:45 crc kubenswrapper[4861]: I1003 13:52:45.280044 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-wk4nw" event={"ID":"a88488b7-b658-4fc0-8d27-7f69eb12d4c4","Type":"ContainerStarted","Data":"7aed167b995159e78a5930e172770220f5bac8d469b0f6e2586d96846bfc6b93"} Oct 03 13:52:45 crc kubenswrapper[4861]: I1003 13:52:45.301531 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-wk4nw" podStartSLOduration=2.091117026 podStartE2EDuration="11.301506554s" podCreationTimestamp="2025-10-03 13:52:34 +0000 UTC" 
firstStartedPulling="2025-10-03 13:52:35.615755815 +0000 UTC m=+1269.613740872" lastFinishedPulling="2025-10-03 13:52:44.826145353 +0000 UTC m=+1278.824130400" observedRunningTime="2025-10-03 13:52:45.296681926 +0000 UTC m=+1279.294666973" watchObservedRunningTime="2025-10-03 13:52:45.301506554 +0000 UTC m=+1279.299491621" Oct 03 13:52:46 crc kubenswrapper[4861]: I1003 13:52:46.306144 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"683e1388-79ac-4d9c-8efb-60171477f47d","Type":"ContainerStarted","Data":"8268285c3583426b2daf73c70192d14b8a4ee7a93cd1828c60ce3b2bc5148e8f"} Oct 03 13:52:47 crc kubenswrapper[4861]: I1003 13:52:47.317398 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"683e1388-79ac-4d9c-8efb-60171477f47d","Type":"ContainerStarted","Data":"efea940d0a98b5f57f1ae1fe1a906fc098e6b05e7b489d6d687745ff9cb6f206"} Oct 03 13:52:48 crc kubenswrapper[4861]: I1003 13:52:48.327124 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"683e1388-79ac-4d9c-8efb-60171477f47d","Type":"ContainerStarted","Data":"aed4440d9527ab2a4c1e80bece9c25232cd60e08d757d8656e9fe67ccc69be2e"} Oct 03 13:52:48 crc kubenswrapper[4861]: I1003 13:52:48.594559 4861 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-84cdb7b9dd-jhc2h" podUID="c589e11a-4953-46ec-aeff-a83f6557421f" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.143:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.143:8443: connect: connection refused" Oct 03 13:52:48 crc kubenswrapper[4861]: I1003 13:52:48.594700 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-84cdb7b9dd-jhc2h" Oct 03 13:52:49 crc kubenswrapper[4861]: I1003 13:52:49.337171 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"683e1388-79ac-4d9c-8efb-60171477f47d","Type":"ContainerStarted","Data":"e87d7a7e46c28100ef7b8c1aad65415967a4d2eda8ee0c6991d758f7053bc150"} Oct 03 13:52:49 crc kubenswrapper[4861]: I1003 13:52:49.337648 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="683e1388-79ac-4d9c-8efb-60171477f47d" containerName="ceilometer-central-agent" containerID="cri-o://8268285c3583426b2daf73c70192d14b8a4ee7a93cd1828c60ce3b2bc5148e8f" gracePeriod=30 Oct 03 13:52:49 crc kubenswrapper[4861]: I1003 13:52:49.337844 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="683e1388-79ac-4d9c-8efb-60171477f47d" containerName="proxy-httpd" containerID="cri-o://e87d7a7e46c28100ef7b8c1aad65415967a4d2eda8ee0c6991d758f7053bc150" gracePeriod=30 Oct 03 13:52:49 crc kubenswrapper[4861]: I1003 13:52:49.337895 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="683e1388-79ac-4d9c-8efb-60171477f47d" containerName="sg-core" containerID="cri-o://aed4440d9527ab2a4c1e80bece9c25232cd60e08d757d8656e9fe67ccc69be2e" gracePeriod=30 Oct 03 13:52:49 crc kubenswrapper[4861]: I1003 13:52:49.337925 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="683e1388-79ac-4d9c-8efb-60171477f47d" containerName="ceilometer-notification-agent" containerID="cri-o://efea940d0a98b5f57f1ae1fe1a906fc098e6b05e7b489d6d687745ff9cb6f206" gracePeriod=30 Oct 03 13:52:49 crc kubenswrapper[4861]: I1003 13:52:49.338029 4861 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 03 13:52:49 crc kubenswrapper[4861]: I1003 13:52:49.373020 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=7.71267857 podStartE2EDuration="11.373004588s" podCreationTimestamp="2025-10-03 13:52:38 +0000 UTC" firstStartedPulling="2025-10-03 13:52:45.233401944 +0000 UTC m=+1279.231386991" lastFinishedPulling="2025-10-03 13:52:48.893727962 +0000 UTC m=+1282.891713009" observedRunningTime="2025-10-03 13:52:49.370601334 +0000 UTC m=+1283.368586381" watchObservedRunningTime="2025-10-03 13:52:49.373004588 +0000 UTC m=+1283.370989625" Oct 03 13:52:50 crc kubenswrapper[4861]: I1003 13:52:50.349685 4861 generic.go:334] "Generic (PLEG): container finished" podID="683e1388-79ac-4d9c-8efb-60171477f47d" containerID="e87d7a7e46c28100ef7b8c1aad65415967a4d2eda8ee0c6991d758f7053bc150" exitCode=0 Oct 03 13:52:50 crc kubenswrapper[4861]: I1003 13:52:50.349717 4861 generic.go:334] "Generic (PLEG): container finished" podID="683e1388-79ac-4d9c-8efb-60171477f47d" containerID="aed4440d9527ab2a4c1e80bece9c25232cd60e08d757d8656e9fe67ccc69be2e" exitCode=2 Oct 03 13:52:50 crc kubenswrapper[4861]: I1003 13:52:50.349727 4861 generic.go:334] "Generic (PLEG): container finished" podID="683e1388-79ac-4d9c-8efb-60171477f47d" containerID="efea940d0a98b5f57f1ae1fe1a906fc098e6b05e7b489d6d687745ff9cb6f206" exitCode=0 Oct 03 13:52:50 crc kubenswrapper[4861]: I1003 13:52:50.349746 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"683e1388-79ac-4d9c-8efb-60171477f47d","Type":"ContainerDied","Data":"e87d7a7e46c28100ef7b8c1aad65415967a4d2eda8ee0c6991d758f7053bc150"} Oct 03 13:52:50 crc kubenswrapper[4861]: I1003 13:52:50.349771 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"683e1388-79ac-4d9c-8efb-60171477f47d","Type":"ContainerDied","Data":"aed4440d9527ab2a4c1e80bece9c25232cd60e08d757d8656e9fe67ccc69be2e"} Oct 03 13:52:50 crc kubenswrapper[4861]: I1003 13:52:50.349781 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"683e1388-79ac-4d9c-8efb-60171477f47d","Type":"ContainerDied","Data":"efea940d0a98b5f57f1ae1fe1a906fc098e6b05e7b489d6d687745ff9cb6f206"} Oct 03 13:52:53 crc kubenswrapper[4861]: I1003 13:52:53.139150 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 03 13:52:53 crc kubenswrapper[4861]: I1003 13:52:53.185086 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/683e1388-79ac-4d9c-8efb-60171477f47d-sg-core-conf-yaml\") pod \"683e1388-79ac-4d9c-8efb-60171477f47d\" (UID: \"683e1388-79ac-4d9c-8efb-60171477f47d\") " Oct 03 13:52:53 crc kubenswrapper[4861]: I1003 13:52:53.185223 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/683e1388-79ac-4d9c-8efb-60171477f47d-run-httpd\") pod \"683e1388-79ac-4d9c-8efb-60171477f47d\" (UID: \"683e1388-79ac-4d9c-8efb-60171477f47d\") " Oct 03 13:52:53 crc kubenswrapper[4861]: I1003 13:52:53.185297 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/683e1388-79ac-4d9c-8efb-60171477f47d-scripts\") pod \"683e1388-79ac-4d9c-8efb-60171477f47d\" (UID: \"683e1388-79ac-4d9c-8efb-60171477f47d\") " Oct 03 13:52:53 crc kubenswrapper[4861]: I1003 13:52:53.185327 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wx5pk\" (UniqueName: \"kubernetes.io/projected/683e1388-79ac-4d9c-8efb-60171477f47d-kube-api-access-wx5pk\") pod \"683e1388-79ac-4d9c-8efb-60171477f47d\" (UID: \"683e1388-79ac-4d9c-8efb-60171477f47d\") " Oct 03 13:52:53 crc kubenswrapper[4861]: I1003 13:52:53.185354 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/683e1388-79ac-4d9c-8efb-60171477f47d-config-data\") pod \"683e1388-79ac-4d9c-8efb-60171477f47d\" (UID: \"683e1388-79ac-4d9c-8efb-60171477f47d\") " Oct 03 13:52:53 crc kubenswrapper[4861]: I1003 13:52:53.185375 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/683e1388-79ac-4d9c-8efb-60171477f47d-combined-ca-bundle\") pod \"683e1388-79ac-4d9c-8efb-60171477f47d\" (UID: \"683e1388-79ac-4d9c-8efb-60171477f47d\") " Oct 03 13:52:53 crc kubenswrapper[4861]: I1003 13:52:53.185410 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/683e1388-79ac-4d9c-8efb-60171477f47d-log-httpd\") pod \"683e1388-79ac-4d9c-8efb-60171477f47d\" (UID: \"683e1388-79ac-4d9c-8efb-60171477f47d\") " Oct 03 13:52:53 crc kubenswrapper[4861]: I1003 13:52:53.185719 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/683e1388-79ac-4d9c-8efb-60171477f47d-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "683e1388-79ac-4d9c-8efb-60171477f47d" (UID: "683e1388-79ac-4d9c-8efb-60171477f47d"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:52:53 crc kubenswrapper[4861]: I1003 13:52:53.186210 4861 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/683e1388-79ac-4d9c-8efb-60171477f47d-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 03 13:52:53 crc kubenswrapper[4861]: I1003 13:52:53.188147 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/683e1388-79ac-4d9c-8efb-60171477f47d-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "683e1388-79ac-4d9c-8efb-60171477f47d" (UID: "683e1388-79ac-4d9c-8efb-60171477f47d"). 
InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:52:53 crc kubenswrapper[4861]: I1003 13:52:53.213351 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/683e1388-79ac-4d9c-8efb-60171477f47d-scripts" (OuterVolumeSpecName: "scripts") pod "683e1388-79ac-4d9c-8efb-60171477f47d" (UID: "683e1388-79ac-4d9c-8efb-60171477f47d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:52:53 crc kubenswrapper[4861]: I1003 13:52:53.213419 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/683e1388-79ac-4d9c-8efb-60171477f47d-kube-api-access-wx5pk" (OuterVolumeSpecName: "kube-api-access-wx5pk") pod "683e1388-79ac-4d9c-8efb-60171477f47d" (UID: "683e1388-79ac-4d9c-8efb-60171477f47d"). InnerVolumeSpecName "kube-api-access-wx5pk". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:52:53 crc kubenswrapper[4861]: I1003 13:52:53.219114 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/683e1388-79ac-4d9c-8efb-60171477f47d-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "683e1388-79ac-4d9c-8efb-60171477f47d" (UID: "683e1388-79ac-4d9c-8efb-60171477f47d"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:52:53 crc kubenswrapper[4861]: I1003 13:52:53.266765 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/683e1388-79ac-4d9c-8efb-60171477f47d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "683e1388-79ac-4d9c-8efb-60171477f47d" (UID: "683e1388-79ac-4d9c-8efb-60171477f47d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:52:53 crc kubenswrapper[4861]: I1003 13:52:53.283840 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/683e1388-79ac-4d9c-8efb-60171477f47d-config-data" (OuterVolumeSpecName: "config-data") pod "683e1388-79ac-4d9c-8efb-60171477f47d" (UID: "683e1388-79ac-4d9c-8efb-60171477f47d"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:52:53 crc kubenswrapper[4861]: I1003 13:52:53.287987 4861 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/683e1388-79ac-4d9c-8efb-60171477f47d-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 03 13:52:53 crc kubenswrapper[4861]: I1003 13:52:53.288010 4861 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/683e1388-79ac-4d9c-8efb-60171477f47d-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 13:52:53 crc kubenswrapper[4861]: I1003 13:52:53.288021 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wx5pk\" (UniqueName: \"kubernetes.io/projected/683e1388-79ac-4d9c-8efb-60171477f47d-kube-api-access-wx5pk\") on node \"crc\" DevicePath \"\"" Oct 03 13:52:53 crc kubenswrapper[4861]: I1003 13:52:53.288033 4861 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/683e1388-79ac-4d9c-8efb-60171477f47d-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 13:52:53 crc kubenswrapper[4861]: I1003 13:52:53.288046 4861 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/683e1388-79ac-4d9c-8efb-60171477f47d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 13:52:53 crc kubenswrapper[4861]: I1003 13:52:53.288055 4861 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/683e1388-79ac-4d9c-8efb-60171477f47d-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 03 13:52:53 crc kubenswrapper[4861]: I1003 13:52:53.377549 4861 generic.go:334] "Generic (PLEG): container finished" podID="683e1388-79ac-4d9c-8efb-60171477f47d" containerID="8268285c3583426b2daf73c70192d14b8a4ee7a93cd1828c60ce3b2bc5148e8f" exitCode=0 Oct 03 13:52:53 crc kubenswrapper[4861]: I1003 13:52:53.377626 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"683e1388-79ac-4d9c-8efb-60171477f47d","Type":"ContainerDied","Data":"8268285c3583426b2daf73c70192d14b8a4ee7a93cd1828c60ce3b2bc5148e8f"} Oct 03 13:52:53 crc kubenswrapper[4861]: I1003 13:52:53.377657 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"683e1388-79ac-4d9c-8efb-60171477f47d","Type":"ContainerDied","Data":"1957850d52ee2cc41f88884b012ea7eec36898a3f029fdaf7bcf1bfd90d2161e"} Oct 03 13:52:53 crc kubenswrapper[4861]: I1003 13:52:53.377674 4861 scope.go:117] "RemoveContainer" containerID="e87d7a7e46c28100ef7b8c1aad65415967a4d2eda8ee0c6991d758f7053bc150" Oct 03 13:52:53 crc kubenswrapper[4861]: I1003 13:52:53.377810 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 03 13:52:53 crc kubenswrapper[4861]: I1003 13:52:53.408657 4861 scope.go:117] "RemoveContainer" containerID="aed4440d9527ab2a4c1e80bece9c25232cd60e08d757d8656e9fe67ccc69be2e" Oct 03 13:52:53 crc kubenswrapper[4861]: I1003 13:52:53.410173 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 03 13:52:53 crc kubenswrapper[4861]: I1003 13:52:53.417772 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 03 13:52:53 crc kubenswrapper[4861]: I1003 13:52:53.429344 4861 scope.go:117] "RemoveContainer" containerID="efea940d0a98b5f57f1ae1fe1a906fc098e6b05e7b489d6d687745ff9cb6f206" Oct 03 13:52:53 crc kubenswrapper[4861]: I1003 13:52:53.443693 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 03 13:52:53 crc kubenswrapper[4861]: E1003 13:52:53.444368 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="683e1388-79ac-4d9c-8efb-60171477f47d" containerName="ceilometer-central-agent" Oct 03 13:52:53 crc kubenswrapper[4861]: I1003 13:52:53.444503 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="683e1388-79ac-4d9c-8efb-60171477f47d" containerName="ceilometer-central-agent" Oct 03 13:52:53 crc kubenswrapper[4861]: E1003 13:52:53.444602 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="683e1388-79ac-4d9c-8efb-60171477f47d" containerName="proxy-httpd" Oct 03 13:52:53 crc kubenswrapper[4861]: I1003 13:52:53.444688 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="683e1388-79ac-4d9c-8efb-60171477f47d" containerName="proxy-httpd" Oct 03 13:52:53 crc kubenswrapper[4861]: E1003 13:52:53.444764 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="683e1388-79ac-4d9c-8efb-60171477f47d" containerName="sg-core" Oct 03 13:52:53 crc kubenswrapper[4861]: I1003 13:52:53.444823 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="683e1388-79ac-4d9c-8efb-60171477f47d" containerName="sg-core" Oct 03 13:52:53 crc kubenswrapper[4861]: E1003 13:52:53.444886 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="683e1388-79ac-4d9c-8efb-60171477f47d" containerName="ceilometer-notification-agent" Oct 03 13:52:53 crc kubenswrapper[4861]: I1003 13:52:53.444946 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="683e1388-79ac-4d9c-8efb-60171477f47d" containerName="ceilometer-notification-agent" Oct 03 13:52:53 crc kubenswrapper[4861]: I1003 13:52:53.445152 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="683e1388-79ac-4d9c-8efb-60171477f47d" containerName="ceilometer-notification-agent" Oct 03 13:52:53 crc kubenswrapper[4861]: I1003 13:52:53.445221 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="683e1388-79ac-4d9c-8efb-60171477f47d" containerName="proxy-httpd" Oct 03 13:52:53 crc kubenswrapper[4861]: I1003 13:52:53.445314 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="683e1388-79ac-4d9c-8efb-60171477f47d" containerName="ceilometer-central-agent" Oct 03 13:52:53 crc kubenswrapper[4861]: I1003 13:52:53.445388 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="683e1388-79ac-4d9c-8efb-60171477f47d" containerName="sg-core" Oct 03 13:52:53 crc kubenswrapper[4861]: I1003 13:52:53.448281 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 03 13:52:53 crc kubenswrapper[4861]: I1003 13:52:53.450181 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 03 13:52:53 crc kubenswrapper[4861]: I1003 13:52:53.450511 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 03 13:52:53 crc kubenswrapper[4861]: I1003 13:52:53.458913 4861 scope.go:117] "RemoveContainer" containerID="8268285c3583426b2daf73c70192d14b8a4ee7a93cd1828c60ce3b2bc5148e8f" Oct 03 13:52:53 crc kubenswrapper[4861]: I1003 13:52:53.488214 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 03 13:52:53 crc kubenswrapper[4861]: I1003 13:52:53.492079 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7139b71d-8825-4675-b435-a0e359d2e5c7-log-httpd\") pod \"ceilometer-0\" (UID: \"7139b71d-8825-4675-b435-a0e359d2e5c7\") " pod="openstack/ceilometer-0" Oct 03 13:52:53 crc kubenswrapper[4861]: I1003 13:52:53.492139 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7139b71d-8825-4675-b435-a0e359d2e5c7-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"7139b71d-8825-4675-b435-a0e359d2e5c7\") " pod="openstack/ceilometer-0" Oct 03 13:52:53 crc kubenswrapper[4861]: I1003 13:52:53.492169 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7139b71d-8825-4675-b435-a0e359d2e5c7-run-httpd\") pod \"ceilometer-0\" (UID: \"7139b71d-8825-4675-b435-a0e359d2e5c7\") " pod="openstack/ceilometer-0" Oct 03 13:52:53 crc kubenswrapper[4861]: I1003 13:52:53.492483 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m8jtx\" (UniqueName: \"kubernetes.io/projected/7139b71d-8825-4675-b435-a0e359d2e5c7-kube-api-access-m8jtx\") pod \"ceilometer-0\" (UID: \"7139b71d-8825-4675-b435-a0e359d2e5c7\") " pod="openstack/ceilometer-0" Oct 03 13:52:53 crc kubenswrapper[4861]: I1003 13:52:53.492636 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7139b71d-8825-4675-b435-a0e359d2e5c7-config-data\") pod \"ceilometer-0\" (UID: \"7139b71d-8825-4675-b435-a0e359d2e5c7\") " pod="openstack/ceilometer-0" Oct 03 13:52:53 crc kubenswrapper[4861]: I1003 13:52:53.492803 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7139b71d-8825-4675-b435-a0e359d2e5c7-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"7139b71d-8825-4675-b435-a0e359d2e5c7\") " pod="openstack/ceilometer-0" Oct 03 13:52:53 crc kubenswrapper[4861]: I1003 13:52:53.492856 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7139b71d-8825-4675-b435-a0e359d2e5c7-scripts\") pod \"ceilometer-0\" (UID: \"7139b71d-8825-4675-b435-a0e359d2e5c7\") " pod="openstack/ceilometer-0" Oct 03 13:52:53 crc kubenswrapper[4861]: I1003 13:52:53.493817 4861 scope.go:117] "RemoveContainer" containerID="e87d7a7e46c28100ef7b8c1aad65415967a4d2eda8ee0c6991d758f7053bc150" Oct 03 13:52:53 crc kubenswrapper[4861]: E1003 
13:52:53.494354 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e87d7a7e46c28100ef7b8c1aad65415967a4d2eda8ee0c6991d758f7053bc150\": container with ID starting with e87d7a7e46c28100ef7b8c1aad65415967a4d2eda8ee0c6991d758f7053bc150 not found: ID does not exist" containerID="e87d7a7e46c28100ef7b8c1aad65415967a4d2eda8ee0c6991d758f7053bc150" Oct 03 13:52:53 crc kubenswrapper[4861]: I1003 13:52:53.494397 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e87d7a7e46c28100ef7b8c1aad65415967a4d2eda8ee0c6991d758f7053bc150"} err="failed to get container status \"e87d7a7e46c28100ef7b8c1aad65415967a4d2eda8ee0c6991d758f7053bc150\": rpc error: code = NotFound desc = could not find container \"e87d7a7e46c28100ef7b8c1aad65415967a4d2eda8ee0c6991d758f7053bc150\": container with ID starting with e87d7a7e46c28100ef7b8c1aad65415967a4d2eda8ee0c6991d758f7053bc150 not found: ID does not exist" Oct 03 13:52:53 crc kubenswrapper[4861]: I1003 13:52:53.494426 4861 scope.go:117] "RemoveContainer" containerID="aed4440d9527ab2a4c1e80bece9c25232cd60e08d757d8656e9fe67ccc69be2e" Oct 03 13:52:53 crc kubenswrapper[4861]: E1003 13:52:53.494754 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aed4440d9527ab2a4c1e80bece9c25232cd60e08d757d8656e9fe67ccc69be2e\": container with ID starting with aed4440d9527ab2a4c1e80bece9c25232cd60e08d757d8656e9fe67ccc69be2e not found: ID does not exist" containerID="aed4440d9527ab2a4c1e80bece9c25232cd60e08d757d8656e9fe67ccc69be2e" Oct 03 13:52:53 crc kubenswrapper[4861]: I1003 13:52:53.494785 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aed4440d9527ab2a4c1e80bece9c25232cd60e08d757d8656e9fe67ccc69be2e"} err="failed to get container status \"aed4440d9527ab2a4c1e80bece9c25232cd60e08d757d8656e9fe67ccc69be2e\": rpc error: code = NotFound desc = could not find container \"aed4440d9527ab2a4c1e80bece9c25232cd60e08d757d8656e9fe67ccc69be2e\": container with ID starting with aed4440d9527ab2a4c1e80bece9c25232cd60e08d757d8656e9fe67ccc69be2e not found: ID does not exist" Oct 03 13:52:53 crc kubenswrapper[4861]: I1003 13:52:53.494806 4861 scope.go:117] "RemoveContainer" containerID="efea940d0a98b5f57f1ae1fe1a906fc098e6b05e7b489d6d687745ff9cb6f206" Oct 03 13:52:53 crc kubenswrapper[4861]: E1003 13:52:53.494998 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"efea940d0a98b5f57f1ae1fe1a906fc098e6b05e7b489d6d687745ff9cb6f206\": container with ID starting with efea940d0a98b5f57f1ae1fe1a906fc098e6b05e7b489d6d687745ff9cb6f206 not found: ID does not exist" containerID="efea940d0a98b5f57f1ae1fe1a906fc098e6b05e7b489d6d687745ff9cb6f206" Oct 03 13:52:53 crc kubenswrapper[4861]: I1003 13:52:53.495023 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"efea940d0a98b5f57f1ae1fe1a906fc098e6b05e7b489d6d687745ff9cb6f206"} err="failed to get container status \"efea940d0a98b5f57f1ae1fe1a906fc098e6b05e7b489d6d687745ff9cb6f206\": rpc error: code = NotFound desc = could not find container \"efea940d0a98b5f57f1ae1fe1a906fc098e6b05e7b489d6d687745ff9cb6f206\": container with ID starting with efea940d0a98b5f57f1ae1fe1a906fc098e6b05e7b489d6d687745ff9cb6f206 not found: ID does not exist" Oct 03 13:52:53 crc kubenswrapper[4861]: I1003 13:52:53.495041 4861 
scope.go:117] "RemoveContainer" containerID="8268285c3583426b2daf73c70192d14b8a4ee7a93cd1828c60ce3b2bc5148e8f" Oct 03 13:52:53 crc kubenswrapper[4861]: E1003 13:52:53.495216 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8268285c3583426b2daf73c70192d14b8a4ee7a93cd1828c60ce3b2bc5148e8f\": container with ID starting with 8268285c3583426b2daf73c70192d14b8a4ee7a93cd1828c60ce3b2bc5148e8f not found: ID does not exist" containerID="8268285c3583426b2daf73c70192d14b8a4ee7a93cd1828c60ce3b2bc5148e8f" Oct 03 13:52:53 crc kubenswrapper[4861]: I1003 13:52:53.495262 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8268285c3583426b2daf73c70192d14b8a4ee7a93cd1828c60ce3b2bc5148e8f"} err="failed to get container status \"8268285c3583426b2daf73c70192d14b8a4ee7a93cd1828c60ce3b2bc5148e8f\": rpc error: code = NotFound desc = could not find container \"8268285c3583426b2daf73c70192d14b8a4ee7a93cd1828c60ce3b2bc5148e8f\": container with ID starting with 8268285c3583426b2daf73c70192d14b8a4ee7a93cd1828c60ce3b2bc5148e8f not found: ID does not exist" Oct 03 13:52:53 crc kubenswrapper[4861]: I1003 13:52:53.594712 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m8jtx\" (UniqueName: \"kubernetes.io/projected/7139b71d-8825-4675-b435-a0e359d2e5c7-kube-api-access-m8jtx\") pod \"ceilometer-0\" (UID: \"7139b71d-8825-4675-b435-a0e359d2e5c7\") " pod="openstack/ceilometer-0" Oct 03 13:52:53 crc kubenswrapper[4861]: I1003 13:52:53.594789 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7139b71d-8825-4675-b435-a0e359d2e5c7-config-data\") pod \"ceilometer-0\" (UID: \"7139b71d-8825-4675-b435-a0e359d2e5c7\") " pod="openstack/ceilometer-0" Oct 03 13:52:53 crc kubenswrapper[4861]: I1003 13:52:53.594846 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7139b71d-8825-4675-b435-a0e359d2e5c7-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"7139b71d-8825-4675-b435-a0e359d2e5c7\") " pod="openstack/ceilometer-0" Oct 03 13:52:53 crc kubenswrapper[4861]: I1003 13:52:53.594874 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7139b71d-8825-4675-b435-a0e359d2e5c7-scripts\") pod \"ceilometer-0\" (UID: \"7139b71d-8825-4675-b435-a0e359d2e5c7\") " pod="openstack/ceilometer-0" Oct 03 13:52:53 crc kubenswrapper[4861]: I1003 13:52:53.594896 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7139b71d-8825-4675-b435-a0e359d2e5c7-log-httpd\") pod \"ceilometer-0\" (UID: \"7139b71d-8825-4675-b435-a0e359d2e5c7\") " pod="openstack/ceilometer-0" Oct 03 13:52:53 crc kubenswrapper[4861]: I1003 13:52:53.594912 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7139b71d-8825-4675-b435-a0e359d2e5c7-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"7139b71d-8825-4675-b435-a0e359d2e5c7\") " pod="openstack/ceilometer-0" Oct 03 13:52:53 crc kubenswrapper[4861]: I1003 13:52:53.594930 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7139b71d-8825-4675-b435-a0e359d2e5c7-run-httpd\") pod 
\"ceilometer-0\" (UID: \"7139b71d-8825-4675-b435-a0e359d2e5c7\") " pod="openstack/ceilometer-0" Oct 03 13:52:53 crc kubenswrapper[4861]: I1003 13:52:53.595580 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7139b71d-8825-4675-b435-a0e359d2e5c7-run-httpd\") pod \"ceilometer-0\" (UID: \"7139b71d-8825-4675-b435-a0e359d2e5c7\") " pod="openstack/ceilometer-0" Oct 03 13:52:53 crc kubenswrapper[4861]: I1003 13:52:53.595596 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7139b71d-8825-4675-b435-a0e359d2e5c7-log-httpd\") pod \"ceilometer-0\" (UID: \"7139b71d-8825-4675-b435-a0e359d2e5c7\") " pod="openstack/ceilometer-0" Oct 03 13:52:53 crc kubenswrapper[4861]: I1003 13:52:53.598081 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7139b71d-8825-4675-b435-a0e359d2e5c7-scripts\") pod \"ceilometer-0\" (UID: \"7139b71d-8825-4675-b435-a0e359d2e5c7\") " pod="openstack/ceilometer-0" Oct 03 13:52:53 crc kubenswrapper[4861]: I1003 13:52:53.598731 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7139b71d-8825-4675-b435-a0e359d2e5c7-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"7139b71d-8825-4675-b435-a0e359d2e5c7\") " pod="openstack/ceilometer-0" Oct 03 13:52:53 crc kubenswrapper[4861]: I1003 13:52:53.599847 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7139b71d-8825-4675-b435-a0e359d2e5c7-config-data\") pod \"ceilometer-0\" (UID: \"7139b71d-8825-4675-b435-a0e359d2e5c7\") " pod="openstack/ceilometer-0" Oct 03 13:52:53 crc kubenswrapper[4861]: I1003 13:52:53.600599 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7139b71d-8825-4675-b435-a0e359d2e5c7-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"7139b71d-8825-4675-b435-a0e359d2e5c7\") " pod="openstack/ceilometer-0" Oct 03 13:52:53 crc kubenswrapper[4861]: I1003 13:52:53.615589 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m8jtx\" (UniqueName: \"kubernetes.io/projected/7139b71d-8825-4675-b435-a0e359d2e5c7-kube-api-access-m8jtx\") pod \"ceilometer-0\" (UID: \"7139b71d-8825-4675-b435-a0e359d2e5c7\") " pod="openstack/ceilometer-0" Oct 03 13:52:53 crc kubenswrapper[4861]: I1003 13:52:53.769337 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 03 13:52:54 crc kubenswrapper[4861]: I1003 13:52:54.230771 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 03 13:52:54 crc kubenswrapper[4861]: I1003 13:52:54.238897 4861 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 03 13:52:54 crc kubenswrapper[4861]: I1003 13:52:54.388805 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7139b71d-8825-4675-b435-a0e359d2e5c7","Type":"ContainerStarted","Data":"1a44b53eaa922545b99a6b5b819c254813f1ccd65327305b03b51f2703757e22"} Oct 03 13:52:54 crc kubenswrapper[4861]: I1003 13:52:54.691297 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="683e1388-79ac-4d9c-8efb-60171477f47d" path="/var/lib/kubelet/pods/683e1388-79ac-4d9c-8efb-60171477f47d/volumes" Oct 03 13:52:55 crc kubenswrapper[4861]: I1003 13:52:55.398145 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7139b71d-8825-4675-b435-a0e359d2e5c7","Type":"ContainerStarted","Data":"f64be3d4711c96b83bdf66a65f2a0777a3612810116e833d3457972fcb5aeb7f"} Oct 03 13:52:56 crc kubenswrapper[4861]: I1003 13:52:56.409855 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7139b71d-8825-4675-b435-a0e359d2e5c7","Type":"ContainerStarted","Data":"9e9b1c12d03249e86b39ff504f5ad415adc62d5988e71d1461820bd67aeaf81b"} Oct 03 13:52:57 crc kubenswrapper[4861]: I1003 13:52:57.426077 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7139b71d-8825-4675-b435-a0e359d2e5c7","Type":"ContainerStarted","Data":"25c808eef4b0e2cc97e5fdd898987674b3a1b7d6f1053980f1e7924f1f7f02ba"} Oct 03 13:52:57 crc kubenswrapper[4861]: I1003 13:52:57.429270 4861 generic.go:334] "Generic (PLEG): container finished" podID="c589e11a-4953-46ec-aeff-a83f6557421f" containerID="963ef2ef3f5f426b3763350ce9604beea4e5c0db8da7c36621492d44753ff880" exitCode=137 Oct 03 13:52:57 crc kubenswrapper[4861]: I1003 13:52:57.429435 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-84cdb7b9dd-jhc2h" event={"ID":"c589e11a-4953-46ec-aeff-a83f6557421f","Type":"ContainerDied","Data":"963ef2ef3f5f426b3763350ce9604beea4e5c0db8da7c36621492d44753ff880"} Oct 03 13:52:57 crc kubenswrapper[4861]: I1003 13:52:57.775910 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-84cdb7b9dd-jhc2h" Oct 03 13:52:57 crc kubenswrapper[4861]: I1003 13:52:57.968073 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c589e11a-4953-46ec-aeff-a83f6557421f-config-data\") pod \"c589e11a-4953-46ec-aeff-a83f6557421f\" (UID: \"c589e11a-4953-46ec-aeff-a83f6557421f\") " Oct 03 13:52:57 crc kubenswrapper[4861]: I1003 13:52:57.968169 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/c589e11a-4953-46ec-aeff-a83f6557421f-horizon-secret-key\") pod \"c589e11a-4953-46ec-aeff-a83f6557421f\" (UID: \"c589e11a-4953-46ec-aeff-a83f6557421f\") " Oct 03 13:52:57 crc kubenswrapper[4861]: I1003 13:52:57.968221 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c589e11a-4953-46ec-aeff-a83f6557421f-logs\") pod \"c589e11a-4953-46ec-aeff-a83f6557421f\" (UID: \"c589e11a-4953-46ec-aeff-a83f6557421f\") " Oct 03 13:52:57 crc kubenswrapper[4861]: I1003 13:52:57.968304 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/c589e11a-4953-46ec-aeff-a83f6557421f-horizon-tls-certs\") pod \"c589e11a-4953-46ec-aeff-a83f6557421f\" (UID: \"c589e11a-4953-46ec-aeff-a83f6557421f\") " Oct 03 13:52:57 crc kubenswrapper[4861]: I1003 13:52:57.968335 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c589e11a-4953-46ec-aeff-a83f6557421f-combined-ca-bundle\") pod \"c589e11a-4953-46ec-aeff-a83f6557421f\" (UID: \"c589e11a-4953-46ec-aeff-a83f6557421f\") " Oct 03 13:52:57 crc kubenswrapper[4861]: I1003 13:52:57.968363 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2czpt\" (UniqueName: \"kubernetes.io/projected/c589e11a-4953-46ec-aeff-a83f6557421f-kube-api-access-2czpt\") pod \"c589e11a-4953-46ec-aeff-a83f6557421f\" (UID: \"c589e11a-4953-46ec-aeff-a83f6557421f\") " Oct 03 13:52:57 crc kubenswrapper[4861]: I1003 13:52:57.968410 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c589e11a-4953-46ec-aeff-a83f6557421f-scripts\") pod \"c589e11a-4953-46ec-aeff-a83f6557421f\" (UID: \"c589e11a-4953-46ec-aeff-a83f6557421f\") " Oct 03 13:52:57 crc kubenswrapper[4861]: I1003 13:52:57.969847 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c589e11a-4953-46ec-aeff-a83f6557421f-logs" (OuterVolumeSpecName: "logs") pod "c589e11a-4953-46ec-aeff-a83f6557421f" (UID: "c589e11a-4953-46ec-aeff-a83f6557421f"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:52:57 crc kubenswrapper[4861]: I1003 13:52:57.977384 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c589e11a-4953-46ec-aeff-a83f6557421f-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "c589e11a-4953-46ec-aeff-a83f6557421f" (UID: "c589e11a-4953-46ec-aeff-a83f6557421f"). InnerVolumeSpecName "horizon-secret-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:52:57 crc kubenswrapper[4861]: I1003 13:52:57.977522 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c589e11a-4953-46ec-aeff-a83f6557421f-kube-api-access-2czpt" (OuterVolumeSpecName: "kube-api-access-2czpt") pod "c589e11a-4953-46ec-aeff-a83f6557421f" (UID: "c589e11a-4953-46ec-aeff-a83f6557421f"). InnerVolumeSpecName "kube-api-access-2czpt". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:52:57 crc kubenswrapper[4861]: I1003 13:52:57.993446 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c589e11a-4953-46ec-aeff-a83f6557421f-scripts" (OuterVolumeSpecName: "scripts") pod "c589e11a-4953-46ec-aeff-a83f6557421f" (UID: "c589e11a-4953-46ec-aeff-a83f6557421f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:52:58 crc kubenswrapper[4861]: I1003 13:52:58.016709 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c589e11a-4953-46ec-aeff-a83f6557421f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c589e11a-4953-46ec-aeff-a83f6557421f" (UID: "c589e11a-4953-46ec-aeff-a83f6557421f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:52:58 crc kubenswrapper[4861]: I1003 13:52:58.034969 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c589e11a-4953-46ec-aeff-a83f6557421f-config-data" (OuterVolumeSpecName: "config-data") pod "c589e11a-4953-46ec-aeff-a83f6557421f" (UID: "c589e11a-4953-46ec-aeff-a83f6557421f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:52:58 crc kubenswrapper[4861]: I1003 13:52:58.045323 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c589e11a-4953-46ec-aeff-a83f6557421f-horizon-tls-certs" (OuterVolumeSpecName: "horizon-tls-certs") pod "c589e11a-4953-46ec-aeff-a83f6557421f" (UID: "c589e11a-4953-46ec-aeff-a83f6557421f"). InnerVolumeSpecName "horizon-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:52:58 crc kubenswrapper[4861]: I1003 13:52:58.080889 4861 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c589e11a-4953-46ec-aeff-a83f6557421f-logs\") on node \"crc\" DevicePath \"\"" Oct 03 13:52:58 crc kubenswrapper[4861]: I1003 13:52:58.080923 4861 reconciler_common.go:293] "Volume detached for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/c589e11a-4953-46ec-aeff-a83f6557421f-horizon-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 03 13:52:58 crc kubenswrapper[4861]: I1003 13:52:58.080934 4861 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c589e11a-4953-46ec-aeff-a83f6557421f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 13:52:58 crc kubenswrapper[4861]: I1003 13:52:58.080943 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2czpt\" (UniqueName: \"kubernetes.io/projected/c589e11a-4953-46ec-aeff-a83f6557421f-kube-api-access-2czpt\") on node \"crc\" DevicePath \"\"" Oct 03 13:52:58 crc kubenswrapper[4861]: I1003 13:52:58.080953 4861 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c589e11a-4953-46ec-aeff-a83f6557421f-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 13:52:58 crc kubenswrapper[4861]: I1003 13:52:58.080961 4861 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c589e11a-4953-46ec-aeff-a83f6557421f-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 13:52:58 crc kubenswrapper[4861]: I1003 13:52:58.080970 4861 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/c589e11a-4953-46ec-aeff-a83f6557421f-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Oct 03 13:52:58 crc kubenswrapper[4861]: I1003 13:52:58.439439 4861 generic.go:334] "Generic (PLEG): container finished" podID="a88488b7-b658-4fc0-8d27-7f69eb12d4c4" containerID="7aed167b995159e78a5930e172770220f5bac8d469b0f6e2586d96846bfc6b93" exitCode=0 Oct 03 13:52:58 crc kubenswrapper[4861]: I1003 13:52:58.439854 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-wk4nw" event={"ID":"a88488b7-b658-4fc0-8d27-7f69eb12d4c4","Type":"ContainerDied","Data":"7aed167b995159e78a5930e172770220f5bac8d469b0f6e2586d96846bfc6b93"} Oct 03 13:52:58 crc kubenswrapper[4861]: I1003 13:52:58.442841 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7139b71d-8825-4675-b435-a0e359d2e5c7","Type":"ContainerStarted","Data":"a36f9a92d95e198c1ad4a086290b10d3bc3a798e4eb8b5b3e8a8028a39d578b8"} Oct 03 13:52:58 crc kubenswrapper[4861]: I1003 13:52:58.443018 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 03 13:52:58 crc kubenswrapper[4861]: I1003 13:52:58.444745 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-84cdb7b9dd-jhc2h" event={"ID":"c589e11a-4953-46ec-aeff-a83f6557421f","Type":"ContainerDied","Data":"6e75197dcad9146f3d5006e8e847a34ae7cfdcff13b7ff1da0ae4e264cd7cb12"} Oct 03 13:52:58 crc kubenswrapper[4861]: I1003 13:52:58.444812 4861 scope.go:117] "RemoveContainer" containerID="65774e93f7a18a88876b7eea12fb7794958ba568a544b650edc95fff9801a980" Oct 03 13:52:58 crc kubenswrapper[4861]: I1003 13:52:58.444980 4861 util.go:48] "No ready sandbox for pod can be 
found. Need to start a new one" pod="openstack/horizon-84cdb7b9dd-jhc2h" Oct 03 13:52:58 crc kubenswrapper[4861]: I1003 13:52:58.493574 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.520140153 podStartE2EDuration="5.49355568s" podCreationTimestamp="2025-10-03 13:52:53 +0000 UTC" firstStartedPulling="2025-10-03 13:52:54.238647783 +0000 UTC m=+1288.236632830" lastFinishedPulling="2025-10-03 13:52:58.21206331 +0000 UTC m=+1292.210048357" observedRunningTime="2025-10-03 13:52:58.484802246 +0000 UTC m=+1292.482787293" watchObservedRunningTime="2025-10-03 13:52:58.49355568 +0000 UTC m=+1292.491540727" Oct 03 13:52:58 crc kubenswrapper[4861]: I1003 13:52:58.520848 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-84cdb7b9dd-jhc2h"] Oct 03 13:52:58 crc kubenswrapper[4861]: I1003 13:52:58.529527 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-84cdb7b9dd-jhc2h"] Oct 03 13:52:58 crc kubenswrapper[4861]: I1003 13:52:58.642336 4861 scope.go:117] "RemoveContainer" containerID="963ef2ef3f5f426b3763350ce9604beea4e5c0db8da7c36621492d44753ff880" Oct 03 13:52:58 crc kubenswrapper[4861]: I1003 13:52:58.703950 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c589e11a-4953-46ec-aeff-a83f6557421f" path="/var/lib/kubelet/pods/c589e11a-4953-46ec-aeff-a83f6557421f/volumes" Oct 03 13:52:59 crc kubenswrapper[4861]: I1003 13:52:59.837558 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-wk4nw" Oct 03 13:52:59 crc kubenswrapper[4861]: I1003 13:52:59.919443 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a88488b7-b658-4fc0-8d27-7f69eb12d4c4-scripts\") pod \"a88488b7-b658-4fc0-8d27-7f69eb12d4c4\" (UID: \"a88488b7-b658-4fc0-8d27-7f69eb12d4c4\") " Oct 03 13:52:59 crc kubenswrapper[4861]: I1003 13:52:59.919567 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a88488b7-b658-4fc0-8d27-7f69eb12d4c4-combined-ca-bundle\") pod \"a88488b7-b658-4fc0-8d27-7f69eb12d4c4\" (UID: \"a88488b7-b658-4fc0-8d27-7f69eb12d4c4\") " Oct 03 13:52:59 crc kubenswrapper[4861]: I1003 13:52:59.919598 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d5xhk\" (UniqueName: \"kubernetes.io/projected/a88488b7-b658-4fc0-8d27-7f69eb12d4c4-kube-api-access-d5xhk\") pod \"a88488b7-b658-4fc0-8d27-7f69eb12d4c4\" (UID: \"a88488b7-b658-4fc0-8d27-7f69eb12d4c4\") " Oct 03 13:52:59 crc kubenswrapper[4861]: I1003 13:52:59.920524 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a88488b7-b658-4fc0-8d27-7f69eb12d4c4-config-data\") pod \"a88488b7-b658-4fc0-8d27-7f69eb12d4c4\" (UID: \"a88488b7-b658-4fc0-8d27-7f69eb12d4c4\") " Oct 03 13:52:59 crc kubenswrapper[4861]: I1003 13:52:59.936166 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a88488b7-b658-4fc0-8d27-7f69eb12d4c4-kube-api-access-d5xhk" (OuterVolumeSpecName: "kube-api-access-d5xhk") pod "a88488b7-b658-4fc0-8d27-7f69eb12d4c4" (UID: "a88488b7-b658-4fc0-8d27-7f69eb12d4c4"). InnerVolumeSpecName "kube-api-access-d5xhk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:52:59 crc kubenswrapper[4861]: I1003 13:52:59.953802 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a88488b7-b658-4fc0-8d27-7f69eb12d4c4-scripts" (OuterVolumeSpecName: "scripts") pod "a88488b7-b658-4fc0-8d27-7f69eb12d4c4" (UID: "a88488b7-b658-4fc0-8d27-7f69eb12d4c4"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:52:59 crc kubenswrapper[4861]: I1003 13:52:59.971456 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a88488b7-b658-4fc0-8d27-7f69eb12d4c4-config-data" (OuterVolumeSpecName: "config-data") pod "a88488b7-b658-4fc0-8d27-7f69eb12d4c4" (UID: "a88488b7-b658-4fc0-8d27-7f69eb12d4c4"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:52:59 crc kubenswrapper[4861]: I1003 13:52:59.979919 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a88488b7-b658-4fc0-8d27-7f69eb12d4c4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a88488b7-b658-4fc0-8d27-7f69eb12d4c4" (UID: "a88488b7-b658-4fc0-8d27-7f69eb12d4c4"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:53:00 crc kubenswrapper[4861]: I1003 13:53:00.022918 4861 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a88488b7-b658-4fc0-8d27-7f69eb12d4c4-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 13:53:00 crc kubenswrapper[4861]: I1003 13:53:00.022947 4861 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a88488b7-b658-4fc0-8d27-7f69eb12d4c4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 13:53:00 crc kubenswrapper[4861]: I1003 13:53:00.022959 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d5xhk\" (UniqueName: \"kubernetes.io/projected/a88488b7-b658-4fc0-8d27-7f69eb12d4c4-kube-api-access-d5xhk\") on node \"crc\" DevicePath \"\"" Oct 03 13:53:00 crc kubenswrapper[4861]: I1003 13:53:00.022970 4861 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a88488b7-b658-4fc0-8d27-7f69eb12d4c4-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 13:53:00 crc kubenswrapper[4861]: I1003 13:53:00.145103 4861 patch_prober.go:28] interesting pod/machine-config-daemon-t9slw container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 13:53:00 crc kubenswrapper[4861]: I1003 13:53:00.145167 4861 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 13:53:00 crc kubenswrapper[4861]: I1003 13:53:00.464822 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-wk4nw" event={"ID":"a88488b7-b658-4fc0-8d27-7f69eb12d4c4","Type":"ContainerDied","Data":"a5e1b97ef93c095862a9aca7cf0b65962e168e77f476fd24689ccd743eb82f8b"} Oct 03 13:53:00 crc kubenswrapper[4861]: I1003 13:53:00.464864 4861 
pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a5e1b97ef93c095862a9aca7cf0b65962e168e77f476fd24689ccd743eb82f8b" Oct 03 13:53:00 crc kubenswrapper[4861]: I1003 13:53:00.464956 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-wk4nw" Oct 03 13:53:00 crc kubenswrapper[4861]: I1003 13:53:00.601191 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Oct 03 13:53:00 crc kubenswrapper[4861]: E1003 13:53:00.601597 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c589e11a-4953-46ec-aeff-a83f6557421f" containerName="horizon" Oct 03 13:53:00 crc kubenswrapper[4861]: I1003 13:53:00.601617 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="c589e11a-4953-46ec-aeff-a83f6557421f" containerName="horizon" Oct 03 13:53:00 crc kubenswrapper[4861]: E1003 13:53:00.601634 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a88488b7-b658-4fc0-8d27-7f69eb12d4c4" containerName="nova-cell0-conductor-db-sync" Oct 03 13:53:00 crc kubenswrapper[4861]: I1003 13:53:00.601642 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="a88488b7-b658-4fc0-8d27-7f69eb12d4c4" containerName="nova-cell0-conductor-db-sync" Oct 03 13:53:00 crc kubenswrapper[4861]: E1003 13:53:00.601666 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c589e11a-4953-46ec-aeff-a83f6557421f" containerName="horizon" Oct 03 13:53:00 crc kubenswrapper[4861]: I1003 13:53:00.601674 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="c589e11a-4953-46ec-aeff-a83f6557421f" containerName="horizon" Oct 03 13:53:00 crc kubenswrapper[4861]: E1003 13:53:00.601700 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c589e11a-4953-46ec-aeff-a83f6557421f" containerName="horizon-log" Oct 03 13:53:00 crc kubenswrapper[4861]: I1003 13:53:00.601705 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="c589e11a-4953-46ec-aeff-a83f6557421f" containerName="horizon-log" Oct 03 13:53:00 crc kubenswrapper[4861]: I1003 13:53:00.601890 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="c589e11a-4953-46ec-aeff-a83f6557421f" containerName="horizon" Oct 03 13:53:00 crc kubenswrapper[4861]: I1003 13:53:00.601906 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="c589e11a-4953-46ec-aeff-a83f6557421f" containerName="horizon" Oct 03 13:53:00 crc kubenswrapper[4861]: I1003 13:53:00.601917 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="a88488b7-b658-4fc0-8d27-7f69eb12d4c4" containerName="nova-cell0-conductor-db-sync" Oct 03 13:53:00 crc kubenswrapper[4861]: I1003 13:53:00.601942 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="c589e11a-4953-46ec-aeff-a83f6557421f" containerName="horizon-log" Oct 03 13:53:00 crc kubenswrapper[4861]: I1003 13:53:00.602620 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Oct 03 13:53:00 crc kubenswrapper[4861]: I1003 13:53:00.604490 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Oct 03 13:53:00 crc kubenswrapper[4861]: I1003 13:53:00.604955 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-7sksh" Oct 03 13:53:00 crc kubenswrapper[4861]: I1003 13:53:00.632537 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04bb7c95-25e1-49b2-b659-2af6e5354749-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"04bb7c95-25e1-49b2-b659-2af6e5354749\") " pod="openstack/nova-cell0-conductor-0" Oct 03 13:53:00 crc kubenswrapper[4861]: I1003 13:53:00.632585 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4bwvw\" (UniqueName: \"kubernetes.io/projected/04bb7c95-25e1-49b2-b659-2af6e5354749-kube-api-access-4bwvw\") pod \"nova-cell0-conductor-0\" (UID: \"04bb7c95-25e1-49b2-b659-2af6e5354749\") " pod="openstack/nova-cell0-conductor-0" Oct 03 13:53:00 crc kubenswrapper[4861]: I1003 13:53:00.632868 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/04bb7c95-25e1-49b2-b659-2af6e5354749-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"04bb7c95-25e1-49b2-b659-2af6e5354749\") " pod="openstack/nova-cell0-conductor-0" Oct 03 13:53:00 crc kubenswrapper[4861]: I1003 13:53:00.634868 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Oct 03 13:53:00 crc kubenswrapper[4861]: I1003 13:53:00.734561 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/04bb7c95-25e1-49b2-b659-2af6e5354749-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"04bb7c95-25e1-49b2-b659-2af6e5354749\") " pod="openstack/nova-cell0-conductor-0" Oct 03 13:53:00 crc kubenswrapper[4861]: I1003 13:53:00.734984 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04bb7c95-25e1-49b2-b659-2af6e5354749-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"04bb7c95-25e1-49b2-b659-2af6e5354749\") " pod="openstack/nova-cell0-conductor-0" Oct 03 13:53:00 crc kubenswrapper[4861]: I1003 13:53:00.735093 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4bwvw\" (UniqueName: \"kubernetes.io/projected/04bb7c95-25e1-49b2-b659-2af6e5354749-kube-api-access-4bwvw\") pod \"nova-cell0-conductor-0\" (UID: \"04bb7c95-25e1-49b2-b659-2af6e5354749\") " pod="openstack/nova-cell0-conductor-0" Oct 03 13:53:00 crc kubenswrapper[4861]: I1003 13:53:00.740827 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04bb7c95-25e1-49b2-b659-2af6e5354749-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"04bb7c95-25e1-49b2-b659-2af6e5354749\") " pod="openstack/nova-cell0-conductor-0" Oct 03 13:53:00 crc kubenswrapper[4861]: I1003 13:53:00.752702 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4bwvw\" (UniqueName: \"kubernetes.io/projected/04bb7c95-25e1-49b2-b659-2af6e5354749-kube-api-access-4bwvw\") pod 
\"nova-cell0-conductor-0\" (UID: \"04bb7c95-25e1-49b2-b659-2af6e5354749\") " pod="openstack/nova-cell0-conductor-0" Oct 03 13:53:00 crc kubenswrapper[4861]: I1003 13:53:00.752926 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/04bb7c95-25e1-49b2-b659-2af6e5354749-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"04bb7c95-25e1-49b2-b659-2af6e5354749\") " pod="openstack/nova-cell0-conductor-0" Oct 03 13:53:00 crc kubenswrapper[4861]: I1003 13:53:00.917973 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Oct 03 13:53:01 crc kubenswrapper[4861]: I1003 13:53:01.396164 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Oct 03 13:53:01 crc kubenswrapper[4861]: W1003 13:53:01.424393 4861 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod04bb7c95_25e1_49b2_b659_2af6e5354749.slice/crio-bd0620468f3460340e640e062dc7852499469b0169f07bf3219011733a317ac6 WatchSource:0}: Error finding container bd0620468f3460340e640e062dc7852499469b0169f07bf3219011733a317ac6: Status 404 returned error can't find the container with id bd0620468f3460340e640e062dc7852499469b0169f07bf3219011733a317ac6 Oct 03 13:53:01 crc kubenswrapper[4861]: I1003 13:53:01.475163 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"04bb7c95-25e1-49b2-b659-2af6e5354749","Type":"ContainerStarted","Data":"bd0620468f3460340e640e062dc7852499469b0169f07bf3219011733a317ac6"} Oct 03 13:53:02 crc kubenswrapper[4861]: I1003 13:53:02.483939 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"04bb7c95-25e1-49b2-b659-2af6e5354749","Type":"ContainerStarted","Data":"c48315a32337e370544009b6e16e7c8581ef4e04045aa29736a7da89c456b9f7"} Oct 03 13:53:02 crc kubenswrapper[4861]: I1003 13:53:02.484314 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Oct 03 13:53:02 crc kubenswrapper[4861]: I1003 13:53:02.503485 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.503465067 podStartE2EDuration="2.503465067s" podCreationTimestamp="2025-10-03 13:53:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:53:02.499631945 +0000 UTC m=+1296.497617002" watchObservedRunningTime="2025-10-03 13:53:02.503465067 +0000 UTC m=+1296.501450114" Oct 03 13:53:10 crc kubenswrapper[4861]: I1003 13:53:10.944855 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Oct 03 13:53:11 crc kubenswrapper[4861]: I1003 13:53:11.482840 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-ngd8x"] Oct 03 13:53:11 crc kubenswrapper[4861]: I1003 13:53:11.484402 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-ngd8x" Oct 03 13:53:11 crc kubenswrapper[4861]: I1003 13:53:11.486702 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data" Oct 03 13:53:11 crc kubenswrapper[4861]: I1003 13:53:11.488743 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts" Oct 03 13:53:11 crc kubenswrapper[4861]: I1003 13:53:11.494750 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-ngd8x"] Oct 03 13:53:11 crc kubenswrapper[4861]: I1003 13:53:11.627301 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zxjt8\" (UniqueName: \"kubernetes.io/projected/0acf7c1b-380a-44b8-8542-6610bbc1f700-kube-api-access-zxjt8\") pod \"nova-cell0-cell-mapping-ngd8x\" (UID: \"0acf7c1b-380a-44b8-8542-6610bbc1f700\") " pod="openstack/nova-cell0-cell-mapping-ngd8x" Oct 03 13:53:11 crc kubenswrapper[4861]: I1003 13:53:11.627352 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0acf7c1b-380a-44b8-8542-6610bbc1f700-config-data\") pod \"nova-cell0-cell-mapping-ngd8x\" (UID: \"0acf7c1b-380a-44b8-8542-6610bbc1f700\") " pod="openstack/nova-cell0-cell-mapping-ngd8x" Oct 03 13:53:11 crc kubenswrapper[4861]: I1003 13:53:11.627410 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0acf7c1b-380a-44b8-8542-6610bbc1f700-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-ngd8x\" (UID: \"0acf7c1b-380a-44b8-8542-6610bbc1f700\") " pod="openstack/nova-cell0-cell-mapping-ngd8x" Oct 03 13:53:11 crc kubenswrapper[4861]: I1003 13:53:11.627437 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0acf7c1b-380a-44b8-8542-6610bbc1f700-scripts\") pod \"nova-cell0-cell-mapping-ngd8x\" (UID: \"0acf7c1b-380a-44b8-8542-6610bbc1f700\") " pod="openstack/nova-cell0-cell-mapping-ngd8x" Oct 03 13:53:11 crc kubenswrapper[4861]: I1003 13:53:11.703246 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Oct 03 13:53:11 crc kubenswrapper[4861]: I1003 13:53:11.709797 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Oct 03 13:53:11 crc kubenswrapper[4861]: I1003 13:53:11.714661 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Oct 03 13:53:11 crc kubenswrapper[4861]: I1003 13:53:11.726832 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 03 13:53:11 crc kubenswrapper[4861]: I1003 13:53:11.730255 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zxjt8\" (UniqueName: \"kubernetes.io/projected/0acf7c1b-380a-44b8-8542-6610bbc1f700-kube-api-access-zxjt8\") pod \"nova-cell0-cell-mapping-ngd8x\" (UID: \"0acf7c1b-380a-44b8-8542-6610bbc1f700\") " pod="openstack/nova-cell0-cell-mapping-ngd8x" Oct 03 13:53:11 crc kubenswrapper[4861]: I1003 13:53:11.730299 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0acf7c1b-380a-44b8-8542-6610bbc1f700-config-data\") pod \"nova-cell0-cell-mapping-ngd8x\" (UID: \"0acf7c1b-380a-44b8-8542-6610bbc1f700\") " pod="openstack/nova-cell0-cell-mapping-ngd8x" Oct 03 13:53:11 crc kubenswrapper[4861]: I1003 13:53:11.730395 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0acf7c1b-380a-44b8-8542-6610bbc1f700-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-ngd8x\" (UID: \"0acf7c1b-380a-44b8-8542-6610bbc1f700\") " pod="openstack/nova-cell0-cell-mapping-ngd8x" Oct 03 13:53:11 crc kubenswrapper[4861]: I1003 13:53:11.747023 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0acf7c1b-380a-44b8-8542-6610bbc1f700-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-ngd8x\" (UID: \"0acf7c1b-380a-44b8-8542-6610bbc1f700\") " pod="openstack/nova-cell0-cell-mapping-ngd8x" Oct 03 13:53:11 crc kubenswrapper[4861]: I1003 13:53:11.751444 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0acf7c1b-380a-44b8-8542-6610bbc1f700-config-data\") pod \"nova-cell0-cell-mapping-ngd8x\" (UID: \"0acf7c1b-380a-44b8-8542-6610bbc1f700\") " pod="openstack/nova-cell0-cell-mapping-ngd8x" Oct 03 13:53:11 crc kubenswrapper[4861]: I1003 13:53:11.758352 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0acf7c1b-380a-44b8-8542-6610bbc1f700-scripts\") pod \"nova-cell0-cell-mapping-ngd8x\" (UID: \"0acf7c1b-380a-44b8-8542-6610bbc1f700\") " pod="openstack/nova-cell0-cell-mapping-ngd8x" Oct 03 13:53:11 crc kubenswrapper[4861]: I1003 13:53:11.824276 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zxjt8\" (UniqueName: \"kubernetes.io/projected/0acf7c1b-380a-44b8-8542-6610bbc1f700-kube-api-access-zxjt8\") pod \"nova-cell0-cell-mapping-ngd8x\" (UID: \"0acf7c1b-380a-44b8-8542-6610bbc1f700\") " pod="openstack/nova-cell0-cell-mapping-ngd8x" Oct 03 13:53:11 crc kubenswrapper[4861]: I1003 13:53:11.841804 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0acf7c1b-380a-44b8-8542-6610bbc1f700-scripts\") pod \"nova-cell0-cell-mapping-ngd8x\" (UID: \"0acf7c1b-380a-44b8-8542-6610bbc1f700\") " pod="openstack/nova-cell0-cell-mapping-ngd8x" Oct 03 13:53:11 crc kubenswrapper[4861]: I1003 13:53:11.899318 4861 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qkphj\" (UniqueName: \"kubernetes.io/projected/199e3032-23d3-4344-8e6c-67be90c68ea7-kube-api-access-qkphj\") pod \"nova-metadata-0\" (UID: \"199e3032-23d3-4344-8e6c-67be90c68ea7\") " pod="openstack/nova-metadata-0" Oct 03 13:53:11 crc kubenswrapper[4861]: I1003 13:53:11.920570 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/199e3032-23d3-4344-8e6c-67be90c68ea7-logs\") pod \"nova-metadata-0\" (UID: \"199e3032-23d3-4344-8e6c-67be90c68ea7\") " pod="openstack/nova-metadata-0" Oct 03 13:53:11 crc kubenswrapper[4861]: I1003 13:53:11.920890 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/199e3032-23d3-4344-8e6c-67be90c68ea7-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"199e3032-23d3-4344-8e6c-67be90c68ea7\") " pod="openstack/nova-metadata-0" Oct 03 13:53:11 crc kubenswrapper[4861]: I1003 13:53:11.921012 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/199e3032-23d3-4344-8e6c-67be90c68ea7-config-data\") pod \"nova-metadata-0\" (UID: \"199e3032-23d3-4344-8e6c-67be90c68ea7\") " pod="openstack/nova-metadata-0" Oct 03 13:53:11 crc kubenswrapper[4861]: I1003 13:53:11.937401 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Oct 03 13:53:11 crc kubenswrapper[4861]: I1003 13:53:11.938666 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 03 13:53:11 crc kubenswrapper[4861]: I1003 13:53:11.942745 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Oct 03 13:53:11 crc kubenswrapper[4861]: I1003 13:53:11.979007 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 03 13:53:12 crc kubenswrapper[4861]: I1003 13:53:12.027419 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/199e3032-23d3-4344-8e6c-67be90c68ea7-logs\") pod \"nova-metadata-0\" (UID: \"199e3032-23d3-4344-8e6c-67be90c68ea7\") " pod="openstack/nova-metadata-0" Oct 03 13:53:12 crc kubenswrapper[4861]: I1003 13:53:12.027495 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/199e3032-23d3-4344-8e6c-67be90c68ea7-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"199e3032-23d3-4344-8e6c-67be90c68ea7\") " pod="openstack/nova-metadata-0" Oct 03 13:53:12 crc kubenswrapper[4861]: I1003 13:53:12.027519 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/199e3032-23d3-4344-8e6c-67be90c68ea7-config-data\") pod \"nova-metadata-0\" (UID: \"199e3032-23d3-4344-8e6c-67be90c68ea7\") " pod="openstack/nova-metadata-0" Oct 03 13:53:12 crc kubenswrapper[4861]: I1003 13:53:12.027542 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/92e600f4-f5d6-445a-aa5c-493b1eec92c8-config-data\") pod \"nova-scheduler-0\" (UID: \"92e600f4-f5d6-445a-aa5c-493b1eec92c8\") " pod="openstack/nova-scheduler-0" Oct 03 13:53:12 crc kubenswrapper[4861]: I1003 
13:53:12.027562 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/92e600f4-f5d6-445a-aa5c-493b1eec92c8-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"92e600f4-f5d6-445a-aa5c-493b1eec92c8\") " pod="openstack/nova-scheduler-0" Oct 03 13:53:12 crc kubenswrapper[4861]: I1003 13:53:12.027637 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qkphj\" (UniqueName: \"kubernetes.io/projected/199e3032-23d3-4344-8e6c-67be90c68ea7-kube-api-access-qkphj\") pod \"nova-metadata-0\" (UID: \"199e3032-23d3-4344-8e6c-67be90c68ea7\") " pod="openstack/nova-metadata-0" Oct 03 13:53:12 crc kubenswrapper[4861]: I1003 13:53:12.027654 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rtvnn\" (UniqueName: \"kubernetes.io/projected/92e600f4-f5d6-445a-aa5c-493b1eec92c8-kube-api-access-rtvnn\") pod \"nova-scheduler-0\" (UID: \"92e600f4-f5d6-445a-aa5c-493b1eec92c8\") " pod="openstack/nova-scheduler-0" Oct 03 13:53:12 crc kubenswrapper[4861]: I1003 13:53:12.028034 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/199e3032-23d3-4344-8e6c-67be90c68ea7-logs\") pod \"nova-metadata-0\" (UID: \"199e3032-23d3-4344-8e6c-67be90c68ea7\") " pod="openstack/nova-metadata-0" Oct 03 13:53:12 crc kubenswrapper[4861]: I1003 13:53:12.060307 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Oct 03 13:53:12 crc kubenswrapper[4861]: I1003 13:53:12.061949 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 03 13:53:12 crc kubenswrapper[4861]: I1003 13:53:12.068032 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/199e3032-23d3-4344-8e6c-67be90c68ea7-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"199e3032-23d3-4344-8e6c-67be90c68ea7\") " pod="openstack/nova-metadata-0" Oct 03 13:53:12 crc kubenswrapper[4861]: I1003 13:53:12.069266 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Oct 03 13:53:12 crc kubenswrapper[4861]: I1003 13:53:12.070160 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/199e3032-23d3-4344-8e6c-67be90c68ea7-config-data\") pod \"nova-metadata-0\" (UID: \"199e3032-23d3-4344-8e6c-67be90c68ea7\") " pod="openstack/nova-metadata-0" Oct 03 13:53:12 crc kubenswrapper[4861]: I1003 13:53:12.073513 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-845d6d6f59-zlqcc"] Oct 03 13:53:12 crc kubenswrapper[4861]: I1003 13:53:12.075060 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-845d6d6f59-zlqcc" Oct 03 13:53:12 crc kubenswrapper[4861]: I1003 13:53:12.079925 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qkphj\" (UniqueName: \"kubernetes.io/projected/199e3032-23d3-4344-8e6c-67be90c68ea7-kube-api-access-qkphj\") pod \"nova-metadata-0\" (UID: \"199e3032-23d3-4344-8e6c-67be90c68ea7\") " pod="openstack/nova-metadata-0" Oct 03 13:53:12 crc kubenswrapper[4861]: I1003 13:53:12.099601 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 03 13:53:12 crc kubenswrapper[4861]: I1003 13:53:12.125627 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-ngd8x" Oct 03 13:53:12 crc kubenswrapper[4861]: I1003 13:53:12.126492 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-845d6d6f59-zlqcc"] Oct 03 13:53:12 crc kubenswrapper[4861]: I1003 13:53:12.131272 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/db0287ff-e926-428b-a4e0-4dd1e3b40b66-dns-swift-storage-0\") pod \"dnsmasq-dns-845d6d6f59-zlqcc\" (UID: \"db0287ff-e926-428b-a4e0-4dd1e3b40b66\") " pod="openstack/dnsmasq-dns-845d6d6f59-zlqcc" Oct 03 13:53:12 crc kubenswrapper[4861]: I1003 13:53:12.131588 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/db0287ff-e926-428b-a4e0-4dd1e3b40b66-ovsdbserver-nb\") pod \"dnsmasq-dns-845d6d6f59-zlqcc\" (UID: \"db0287ff-e926-428b-a4e0-4dd1e3b40b66\") " pod="openstack/dnsmasq-dns-845d6d6f59-zlqcc" Oct 03 13:53:12 crc kubenswrapper[4861]: I1003 13:53:12.131796 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/47fbe704-46a5-4313-ba09-06f613943f0a-logs\") pod \"nova-api-0\" (UID: \"47fbe704-46a5-4313-ba09-06f613943f0a\") " pod="openstack/nova-api-0" Oct 03 13:53:12 crc kubenswrapper[4861]: I1003 13:53:12.131960 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rtvnn\" (UniqueName: \"kubernetes.io/projected/92e600f4-f5d6-445a-aa5c-493b1eec92c8-kube-api-access-rtvnn\") pod \"nova-scheduler-0\" (UID: \"92e600f4-f5d6-445a-aa5c-493b1eec92c8\") " pod="openstack/nova-scheduler-0" Oct 03 13:53:12 crc kubenswrapper[4861]: I1003 13:53:12.132627 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/db0287ff-e926-428b-a4e0-4dd1e3b40b66-ovsdbserver-sb\") pod \"dnsmasq-dns-845d6d6f59-zlqcc\" (UID: \"db0287ff-e926-428b-a4e0-4dd1e3b40b66\") " pod="openstack/dnsmasq-dns-845d6d6f59-zlqcc" Oct 03 13:53:12 crc kubenswrapper[4861]: I1003 13:53:12.132841 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/db0287ff-e926-428b-a4e0-4dd1e3b40b66-dns-svc\") pod \"dnsmasq-dns-845d6d6f59-zlqcc\" (UID: \"db0287ff-e926-428b-a4e0-4dd1e3b40b66\") " pod="openstack/dnsmasq-dns-845d6d6f59-zlqcc" Oct 03 13:53:12 crc kubenswrapper[4861]: I1003 13:53:12.133032 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qb6zh\" (UniqueName: 
\"kubernetes.io/projected/47fbe704-46a5-4313-ba09-06f613943f0a-kube-api-access-qb6zh\") pod \"nova-api-0\" (UID: \"47fbe704-46a5-4313-ba09-06f613943f0a\") " pod="openstack/nova-api-0" Oct 03 13:53:12 crc kubenswrapper[4861]: I1003 13:53:12.133217 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/47fbe704-46a5-4313-ba09-06f613943f0a-config-data\") pod \"nova-api-0\" (UID: \"47fbe704-46a5-4313-ba09-06f613943f0a\") " pod="openstack/nova-api-0" Oct 03 13:53:12 crc kubenswrapper[4861]: I1003 13:53:12.133358 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sl6vd\" (UniqueName: \"kubernetes.io/projected/db0287ff-e926-428b-a4e0-4dd1e3b40b66-kube-api-access-sl6vd\") pod \"dnsmasq-dns-845d6d6f59-zlqcc\" (UID: \"db0287ff-e926-428b-a4e0-4dd1e3b40b66\") " pod="openstack/dnsmasq-dns-845d6d6f59-zlqcc" Oct 03 13:53:12 crc kubenswrapper[4861]: I1003 13:53:12.133485 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/92e600f4-f5d6-445a-aa5c-493b1eec92c8-config-data\") pod \"nova-scheduler-0\" (UID: \"92e600f4-f5d6-445a-aa5c-493b1eec92c8\") " pod="openstack/nova-scheduler-0" Oct 03 13:53:12 crc kubenswrapper[4861]: I1003 13:53:12.133581 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/db0287ff-e926-428b-a4e0-4dd1e3b40b66-config\") pod \"dnsmasq-dns-845d6d6f59-zlqcc\" (UID: \"db0287ff-e926-428b-a4e0-4dd1e3b40b66\") " pod="openstack/dnsmasq-dns-845d6d6f59-zlqcc" Oct 03 13:53:12 crc kubenswrapper[4861]: I1003 13:53:12.133668 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/92e600f4-f5d6-445a-aa5c-493b1eec92c8-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"92e600f4-f5d6-445a-aa5c-493b1eec92c8\") " pod="openstack/nova-scheduler-0" Oct 03 13:53:12 crc kubenswrapper[4861]: I1003 13:53:12.133772 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/47fbe704-46a5-4313-ba09-06f613943f0a-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"47fbe704-46a5-4313-ba09-06f613943f0a\") " pod="openstack/nova-api-0" Oct 03 13:53:12 crc kubenswrapper[4861]: I1003 13:53:12.138959 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/92e600f4-f5d6-445a-aa5c-493b1eec92c8-config-data\") pod \"nova-scheduler-0\" (UID: \"92e600f4-f5d6-445a-aa5c-493b1eec92c8\") " pod="openstack/nova-scheduler-0" Oct 03 13:53:12 crc kubenswrapper[4861]: I1003 13:53:12.155986 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/92e600f4-f5d6-445a-aa5c-493b1eec92c8-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"92e600f4-f5d6-445a-aa5c-493b1eec92c8\") " pod="openstack/nova-scheduler-0" Oct 03 13:53:12 crc kubenswrapper[4861]: I1003 13:53:12.163795 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rtvnn\" (UniqueName: \"kubernetes.io/projected/92e600f4-f5d6-445a-aa5c-493b1eec92c8-kube-api-access-rtvnn\") pod \"nova-scheduler-0\" (UID: \"92e600f4-f5d6-445a-aa5c-493b1eec92c8\") " pod="openstack/nova-scheduler-0" Oct 03 
13:53:12 crc kubenswrapper[4861]: I1003 13:53:12.236047 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/47fbe704-46a5-4313-ba09-06f613943f0a-config-data\") pod \"nova-api-0\" (UID: \"47fbe704-46a5-4313-ba09-06f613943f0a\") " pod="openstack/nova-api-0" Oct 03 13:53:12 crc kubenswrapper[4861]: I1003 13:53:12.236100 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sl6vd\" (UniqueName: \"kubernetes.io/projected/db0287ff-e926-428b-a4e0-4dd1e3b40b66-kube-api-access-sl6vd\") pod \"dnsmasq-dns-845d6d6f59-zlqcc\" (UID: \"db0287ff-e926-428b-a4e0-4dd1e3b40b66\") " pod="openstack/dnsmasq-dns-845d6d6f59-zlqcc" Oct 03 13:53:12 crc kubenswrapper[4861]: I1003 13:53:12.236137 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/db0287ff-e926-428b-a4e0-4dd1e3b40b66-config\") pod \"dnsmasq-dns-845d6d6f59-zlqcc\" (UID: \"db0287ff-e926-428b-a4e0-4dd1e3b40b66\") " pod="openstack/dnsmasq-dns-845d6d6f59-zlqcc" Oct 03 13:53:12 crc kubenswrapper[4861]: I1003 13:53:12.236159 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/47fbe704-46a5-4313-ba09-06f613943f0a-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"47fbe704-46a5-4313-ba09-06f613943f0a\") " pod="openstack/nova-api-0" Oct 03 13:53:12 crc kubenswrapper[4861]: I1003 13:53:12.236208 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/db0287ff-e926-428b-a4e0-4dd1e3b40b66-dns-swift-storage-0\") pod \"dnsmasq-dns-845d6d6f59-zlqcc\" (UID: \"db0287ff-e926-428b-a4e0-4dd1e3b40b66\") " pod="openstack/dnsmasq-dns-845d6d6f59-zlqcc" Oct 03 13:53:12 crc kubenswrapper[4861]: I1003 13:53:12.236261 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/db0287ff-e926-428b-a4e0-4dd1e3b40b66-ovsdbserver-nb\") pod \"dnsmasq-dns-845d6d6f59-zlqcc\" (UID: \"db0287ff-e926-428b-a4e0-4dd1e3b40b66\") " pod="openstack/dnsmasq-dns-845d6d6f59-zlqcc" Oct 03 13:53:12 crc kubenswrapper[4861]: I1003 13:53:12.236279 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/47fbe704-46a5-4313-ba09-06f613943f0a-logs\") pod \"nova-api-0\" (UID: \"47fbe704-46a5-4313-ba09-06f613943f0a\") " pod="openstack/nova-api-0" Oct 03 13:53:12 crc kubenswrapper[4861]: I1003 13:53:12.236329 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/db0287ff-e926-428b-a4e0-4dd1e3b40b66-ovsdbserver-sb\") pod \"dnsmasq-dns-845d6d6f59-zlqcc\" (UID: \"db0287ff-e926-428b-a4e0-4dd1e3b40b66\") " pod="openstack/dnsmasq-dns-845d6d6f59-zlqcc" Oct 03 13:53:12 crc kubenswrapper[4861]: I1003 13:53:12.236372 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/db0287ff-e926-428b-a4e0-4dd1e3b40b66-dns-svc\") pod \"dnsmasq-dns-845d6d6f59-zlqcc\" (UID: \"db0287ff-e926-428b-a4e0-4dd1e3b40b66\") " pod="openstack/dnsmasq-dns-845d6d6f59-zlqcc" Oct 03 13:53:12 crc kubenswrapper[4861]: I1003 13:53:12.236417 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qb6zh\" (UniqueName: 
\"kubernetes.io/projected/47fbe704-46a5-4313-ba09-06f613943f0a-kube-api-access-qb6zh\") pod \"nova-api-0\" (UID: \"47fbe704-46a5-4313-ba09-06f613943f0a\") " pod="openstack/nova-api-0" Oct 03 13:53:12 crc kubenswrapper[4861]: I1003 13:53:12.237524 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/db0287ff-e926-428b-a4e0-4dd1e3b40b66-config\") pod \"dnsmasq-dns-845d6d6f59-zlqcc\" (UID: \"db0287ff-e926-428b-a4e0-4dd1e3b40b66\") " pod="openstack/dnsmasq-dns-845d6d6f59-zlqcc" Oct 03 13:53:12 crc kubenswrapper[4861]: I1003 13:53:12.238214 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/db0287ff-e926-428b-a4e0-4dd1e3b40b66-dns-swift-storage-0\") pod \"dnsmasq-dns-845d6d6f59-zlqcc\" (UID: \"db0287ff-e926-428b-a4e0-4dd1e3b40b66\") " pod="openstack/dnsmasq-dns-845d6d6f59-zlqcc" Oct 03 13:53:12 crc kubenswrapper[4861]: I1003 13:53:12.238419 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/47fbe704-46a5-4313-ba09-06f613943f0a-logs\") pod \"nova-api-0\" (UID: \"47fbe704-46a5-4313-ba09-06f613943f0a\") " pod="openstack/nova-api-0" Oct 03 13:53:12 crc kubenswrapper[4861]: I1003 13:53:12.239295 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/db0287ff-e926-428b-a4e0-4dd1e3b40b66-ovsdbserver-sb\") pod \"dnsmasq-dns-845d6d6f59-zlqcc\" (UID: \"db0287ff-e926-428b-a4e0-4dd1e3b40b66\") " pod="openstack/dnsmasq-dns-845d6d6f59-zlqcc" Oct 03 13:53:12 crc kubenswrapper[4861]: I1003 13:53:12.240001 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/db0287ff-e926-428b-a4e0-4dd1e3b40b66-dns-svc\") pod \"dnsmasq-dns-845d6d6f59-zlqcc\" (UID: \"db0287ff-e926-428b-a4e0-4dd1e3b40b66\") " pod="openstack/dnsmasq-dns-845d6d6f59-zlqcc" Oct 03 13:53:12 crc kubenswrapper[4861]: I1003 13:53:12.245120 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/47fbe704-46a5-4313-ba09-06f613943f0a-config-data\") pod \"nova-api-0\" (UID: \"47fbe704-46a5-4313-ba09-06f613943f0a\") " pod="openstack/nova-api-0" Oct 03 13:53:12 crc kubenswrapper[4861]: I1003 13:53:12.254027 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/47fbe704-46a5-4313-ba09-06f613943f0a-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"47fbe704-46a5-4313-ba09-06f613943f0a\") " pod="openstack/nova-api-0" Oct 03 13:53:12 crc kubenswrapper[4861]: I1003 13:53:12.257536 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/db0287ff-e926-428b-a4e0-4dd1e3b40b66-ovsdbserver-nb\") pod \"dnsmasq-dns-845d6d6f59-zlqcc\" (UID: \"db0287ff-e926-428b-a4e0-4dd1e3b40b66\") " pod="openstack/dnsmasq-dns-845d6d6f59-zlqcc" Oct 03 13:53:12 crc kubenswrapper[4861]: I1003 13:53:12.262556 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qb6zh\" (UniqueName: \"kubernetes.io/projected/47fbe704-46a5-4313-ba09-06f613943f0a-kube-api-access-qb6zh\") pod \"nova-api-0\" (UID: \"47fbe704-46a5-4313-ba09-06f613943f0a\") " pod="openstack/nova-api-0" Oct 03 13:53:12 crc kubenswrapper[4861]: I1003 13:53:12.287459 4861 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-sl6vd\" (UniqueName: \"kubernetes.io/projected/db0287ff-e926-428b-a4e0-4dd1e3b40b66-kube-api-access-sl6vd\") pod \"dnsmasq-dns-845d6d6f59-zlqcc\" (UID: \"db0287ff-e926-428b-a4e0-4dd1e3b40b66\") " pod="openstack/dnsmasq-dns-845d6d6f59-zlqcc" Oct 03 13:53:12 crc kubenswrapper[4861]: I1003 13:53:12.291766 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 03 13:53:12 crc kubenswrapper[4861]: I1003 13:53:12.299738 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 03 13:53:12 crc kubenswrapper[4861]: I1003 13:53:12.302166 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 03 13:53:12 crc kubenswrapper[4861]: I1003 13:53:12.308224 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Oct 03 13:53:12 crc kubenswrapper[4861]: I1003 13:53:12.321826 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 03 13:53:12 crc kubenswrapper[4861]: I1003 13:53:12.332361 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 03 13:53:12 crc kubenswrapper[4861]: I1003 13:53:12.338173 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/344b366c-9eb6-4732-be14-9d20dfd27336-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"344b366c-9eb6-4732-be14-9d20dfd27336\") " pod="openstack/nova-cell1-novncproxy-0" Oct 03 13:53:12 crc kubenswrapper[4861]: I1003 13:53:12.338549 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/344b366c-9eb6-4732-be14-9d20dfd27336-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"344b366c-9eb6-4732-be14-9d20dfd27336\") " pod="openstack/nova-cell1-novncproxy-0" Oct 03 13:53:12 crc kubenswrapper[4861]: I1003 13:53:12.338691 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xd2ml\" (UniqueName: \"kubernetes.io/projected/344b366c-9eb6-4732-be14-9d20dfd27336-kube-api-access-xd2ml\") pod \"nova-cell1-novncproxy-0\" (UID: \"344b366c-9eb6-4732-be14-9d20dfd27336\") " pod="openstack/nova-cell1-novncproxy-0" Oct 03 13:53:12 crc kubenswrapper[4861]: I1003 13:53:12.440103 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xd2ml\" (UniqueName: \"kubernetes.io/projected/344b366c-9eb6-4732-be14-9d20dfd27336-kube-api-access-xd2ml\") pod \"nova-cell1-novncproxy-0\" (UID: \"344b366c-9eb6-4732-be14-9d20dfd27336\") " pod="openstack/nova-cell1-novncproxy-0" Oct 03 13:53:12 crc kubenswrapper[4861]: I1003 13:53:12.440562 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/344b366c-9eb6-4732-be14-9d20dfd27336-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"344b366c-9eb6-4732-be14-9d20dfd27336\") " pod="openstack/nova-cell1-novncproxy-0" Oct 03 13:53:12 crc kubenswrapper[4861]: I1003 13:53:12.440710 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/344b366c-9eb6-4732-be14-9d20dfd27336-config-data\") pod 
\"nova-cell1-novncproxy-0\" (UID: \"344b366c-9eb6-4732-be14-9d20dfd27336\") " pod="openstack/nova-cell1-novncproxy-0" Oct 03 13:53:12 crc kubenswrapper[4861]: I1003 13:53:12.450100 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/344b366c-9eb6-4732-be14-9d20dfd27336-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"344b366c-9eb6-4732-be14-9d20dfd27336\") " pod="openstack/nova-cell1-novncproxy-0" Oct 03 13:53:12 crc kubenswrapper[4861]: I1003 13:53:12.450600 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/344b366c-9eb6-4732-be14-9d20dfd27336-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"344b366c-9eb6-4732-be14-9d20dfd27336\") " pod="openstack/nova-cell1-novncproxy-0" Oct 03 13:53:12 crc kubenswrapper[4861]: I1003 13:53:12.469348 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xd2ml\" (UniqueName: \"kubernetes.io/projected/344b366c-9eb6-4732-be14-9d20dfd27336-kube-api-access-xd2ml\") pod \"nova-cell1-novncproxy-0\" (UID: \"344b366c-9eb6-4732-be14-9d20dfd27336\") " pod="openstack/nova-cell1-novncproxy-0" Oct 03 13:53:12 crc kubenswrapper[4861]: I1003 13:53:12.472990 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 03 13:53:12 crc kubenswrapper[4861]: I1003 13:53:12.522850 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-845d6d6f59-zlqcc" Oct 03 13:53:12 crc kubenswrapper[4861]: I1003 13:53:12.671525 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 03 13:53:12 crc kubenswrapper[4861]: I1003 13:53:12.902499 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-ngd8x"] Oct 03 13:53:13 crc kubenswrapper[4861]: I1003 13:53:13.035626 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 03 13:53:13 crc kubenswrapper[4861]: I1003 13:53:13.099635 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-hgvnj"] Oct 03 13:53:13 crc kubenswrapper[4861]: I1003 13:53:13.100770 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-hgvnj" Oct 03 13:53:13 crc kubenswrapper[4861]: I1003 13:53:13.105437 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Oct 03 13:53:13 crc kubenswrapper[4861]: I1003 13:53:13.105642 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts" Oct 03 13:53:13 crc kubenswrapper[4861]: I1003 13:53:13.124644 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-hgvnj"] Oct 03 13:53:13 crc kubenswrapper[4861]: I1003 13:53:13.167958 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f57ff497-fc3f-4872-b1e3-3927895d7c6c-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-hgvnj\" (UID: \"f57ff497-fc3f-4872-b1e3-3927895d7c6c\") " pod="openstack/nova-cell1-conductor-db-sync-hgvnj" Oct 03 13:53:13 crc kubenswrapper[4861]: I1003 13:53:13.167996 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2s9fd\" (UniqueName: \"kubernetes.io/projected/f57ff497-fc3f-4872-b1e3-3927895d7c6c-kube-api-access-2s9fd\") pod \"nova-cell1-conductor-db-sync-hgvnj\" (UID: \"f57ff497-fc3f-4872-b1e3-3927895d7c6c\") " pod="openstack/nova-cell1-conductor-db-sync-hgvnj" Oct 03 13:53:13 crc kubenswrapper[4861]: I1003 13:53:13.168102 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f57ff497-fc3f-4872-b1e3-3927895d7c6c-config-data\") pod \"nova-cell1-conductor-db-sync-hgvnj\" (UID: \"f57ff497-fc3f-4872-b1e3-3927895d7c6c\") " pod="openstack/nova-cell1-conductor-db-sync-hgvnj" Oct 03 13:53:13 crc kubenswrapper[4861]: I1003 13:53:13.168131 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f57ff497-fc3f-4872-b1e3-3927895d7c6c-scripts\") pod \"nova-cell1-conductor-db-sync-hgvnj\" (UID: \"f57ff497-fc3f-4872-b1e3-3927895d7c6c\") " pod="openstack/nova-cell1-conductor-db-sync-hgvnj" Oct 03 13:53:13 crc kubenswrapper[4861]: I1003 13:53:13.250978 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 03 13:53:13 crc kubenswrapper[4861]: I1003 13:53:13.273652 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f57ff497-fc3f-4872-b1e3-3927895d7c6c-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-hgvnj\" (UID: \"f57ff497-fc3f-4872-b1e3-3927895d7c6c\") " pod="openstack/nova-cell1-conductor-db-sync-hgvnj" Oct 03 13:53:13 crc kubenswrapper[4861]: I1003 13:53:13.273695 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2s9fd\" (UniqueName: \"kubernetes.io/projected/f57ff497-fc3f-4872-b1e3-3927895d7c6c-kube-api-access-2s9fd\") pod \"nova-cell1-conductor-db-sync-hgvnj\" (UID: \"f57ff497-fc3f-4872-b1e3-3927895d7c6c\") " pod="openstack/nova-cell1-conductor-db-sync-hgvnj" Oct 03 13:53:13 crc kubenswrapper[4861]: I1003 13:53:13.273787 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f57ff497-fc3f-4872-b1e3-3927895d7c6c-config-data\") pod \"nova-cell1-conductor-db-sync-hgvnj\" (UID: 
\"f57ff497-fc3f-4872-b1e3-3927895d7c6c\") " pod="openstack/nova-cell1-conductor-db-sync-hgvnj" Oct 03 13:53:13 crc kubenswrapper[4861]: I1003 13:53:13.273814 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f57ff497-fc3f-4872-b1e3-3927895d7c6c-scripts\") pod \"nova-cell1-conductor-db-sync-hgvnj\" (UID: \"f57ff497-fc3f-4872-b1e3-3927895d7c6c\") " pod="openstack/nova-cell1-conductor-db-sync-hgvnj" Oct 03 13:53:13 crc kubenswrapper[4861]: I1003 13:53:13.281524 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f57ff497-fc3f-4872-b1e3-3927895d7c6c-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-hgvnj\" (UID: \"f57ff497-fc3f-4872-b1e3-3927895d7c6c\") " pod="openstack/nova-cell1-conductor-db-sync-hgvnj" Oct 03 13:53:13 crc kubenswrapper[4861]: I1003 13:53:13.282002 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f57ff497-fc3f-4872-b1e3-3927895d7c6c-config-data\") pod \"nova-cell1-conductor-db-sync-hgvnj\" (UID: \"f57ff497-fc3f-4872-b1e3-3927895d7c6c\") " pod="openstack/nova-cell1-conductor-db-sync-hgvnj" Oct 03 13:53:13 crc kubenswrapper[4861]: I1003 13:53:13.283544 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f57ff497-fc3f-4872-b1e3-3927895d7c6c-scripts\") pod \"nova-cell1-conductor-db-sync-hgvnj\" (UID: \"f57ff497-fc3f-4872-b1e3-3927895d7c6c\") " pod="openstack/nova-cell1-conductor-db-sync-hgvnj" Oct 03 13:53:13 crc kubenswrapper[4861]: I1003 13:53:13.309942 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2s9fd\" (UniqueName: \"kubernetes.io/projected/f57ff497-fc3f-4872-b1e3-3927895d7c6c-kube-api-access-2s9fd\") pod \"nova-cell1-conductor-db-sync-hgvnj\" (UID: \"f57ff497-fc3f-4872-b1e3-3927895d7c6c\") " pod="openstack/nova-cell1-conductor-db-sync-hgvnj" Oct 03 13:53:13 crc kubenswrapper[4861]: I1003 13:53:13.311089 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 03 13:53:13 crc kubenswrapper[4861]: W1003 13:53:13.317726 4861 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod47fbe704_46a5_4313_ba09_06f613943f0a.slice/crio-7f8d2d68028438f371854b4a32f146ae7240f0ad27695e5b0d7ae6fbb44728e8 WatchSource:0}: Error finding container 7f8d2d68028438f371854b4a32f146ae7240f0ad27695e5b0d7ae6fbb44728e8: Status 404 returned error can't find the container with id 7f8d2d68028438f371854b4a32f146ae7240f0ad27695e5b0d7ae6fbb44728e8 Oct 03 13:53:13 crc kubenswrapper[4861]: I1003 13:53:13.442249 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-hgvnj" Oct 03 13:53:13 crc kubenswrapper[4861]: I1003 13:53:13.547719 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-845d6d6f59-zlqcc"] Oct 03 13:53:13 crc kubenswrapper[4861]: I1003 13:53:13.589023 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 03 13:53:13 crc kubenswrapper[4861]: I1003 13:53:13.607436 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-845d6d6f59-zlqcc" event={"ID":"db0287ff-e926-428b-a4e0-4dd1e3b40b66","Type":"ContainerStarted","Data":"5c9c495a1859c49850187d9c60ea79a2ef821e94017b7f47088c828c3f0acef6"} Oct 03 13:53:13 crc kubenswrapper[4861]: I1003 13:53:13.609571 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"47fbe704-46a5-4313-ba09-06f613943f0a","Type":"ContainerStarted","Data":"7f8d2d68028438f371854b4a32f146ae7240f0ad27695e5b0d7ae6fbb44728e8"} Oct 03 13:53:13 crc kubenswrapper[4861]: I1003 13:53:13.613723 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"199e3032-23d3-4344-8e6c-67be90c68ea7","Type":"ContainerStarted","Data":"282d99ce2da2e1b5945fd4a7d82331a3a8893be2bd17fb370b7c9a9911c9a6ec"} Oct 03 13:53:13 crc kubenswrapper[4861]: I1003 13:53:13.617134 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"92e600f4-f5d6-445a-aa5c-493b1eec92c8","Type":"ContainerStarted","Data":"5e836593ae0dc3e0ff94e1b96820606e3a700c14726d3f3315fad29fc5312f2b"} Oct 03 13:53:13 crc kubenswrapper[4861]: I1003 13:53:13.640932 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-ngd8x" event={"ID":"0acf7c1b-380a-44b8-8542-6610bbc1f700","Type":"ContainerStarted","Data":"b52a454d127d124d10680549c4755bea60436c80787cda5e54c997dd4d6f67e5"} Oct 03 13:53:13 crc kubenswrapper[4861]: I1003 13:53:13.640978 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-ngd8x" event={"ID":"0acf7c1b-380a-44b8-8542-6610bbc1f700","Type":"ContainerStarted","Data":"31f3850b270ac0e3804daf17386e8413c9d35deca3a111ed6fc3993e423c00e5"} Oct 03 13:53:13 crc kubenswrapper[4861]: I1003 13:53:13.667798 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-ngd8x" podStartSLOduration=2.667780134 podStartE2EDuration="2.667780134s" podCreationTimestamp="2025-10-03 13:53:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:53:13.66387318 +0000 UTC m=+1307.661858227" watchObservedRunningTime="2025-10-03 13:53:13.667780134 +0000 UTC m=+1307.665765181" Oct 03 13:53:14 crc kubenswrapper[4861]: I1003 13:53:14.050012 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-hgvnj"] Oct 03 13:53:14 crc kubenswrapper[4861]: I1003 13:53:14.656323 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-hgvnj" event={"ID":"f57ff497-fc3f-4872-b1e3-3927895d7c6c","Type":"ContainerStarted","Data":"9daf7e884343bfddc03defa9cfeb515f90dae102ec52f788ed18250bb430cb2a"} Oct 03 13:53:14 crc kubenswrapper[4861]: I1003 13:53:14.656619 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-hgvnj" 
event={"ID":"f57ff497-fc3f-4872-b1e3-3927895d7c6c","Type":"ContainerStarted","Data":"f2e3a35e0f92649dd1e31a09fff676c40df1056591f33f48d56b8ebc64cc3cfb"} Oct 03 13:53:14 crc kubenswrapper[4861]: I1003 13:53:14.660867 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"344b366c-9eb6-4732-be14-9d20dfd27336","Type":"ContainerStarted","Data":"0ffb80b26a81083af149d21cfdc4057c1fed679b369255bc56535745dbe5ce45"} Oct 03 13:53:14 crc kubenswrapper[4861]: I1003 13:53:14.664324 4861 generic.go:334] "Generic (PLEG): container finished" podID="db0287ff-e926-428b-a4e0-4dd1e3b40b66" containerID="f3e2aa3585fa89ee119400a031ddc6fdd92cd4c4f4bf88bf7db0082c7bdf1f7a" exitCode=0 Oct 03 13:53:14 crc kubenswrapper[4861]: I1003 13:53:14.665324 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-845d6d6f59-zlqcc" event={"ID":"db0287ff-e926-428b-a4e0-4dd1e3b40b66","Type":"ContainerDied","Data":"f3e2aa3585fa89ee119400a031ddc6fdd92cd4c4f4bf88bf7db0082c7bdf1f7a"} Oct 03 13:53:14 crc kubenswrapper[4861]: I1003 13:53:14.679989 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-hgvnj" podStartSLOduration=1.679973005 podStartE2EDuration="1.679973005s" podCreationTimestamp="2025-10-03 13:53:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:53:14.677075278 +0000 UTC m=+1308.675060325" watchObservedRunningTime="2025-10-03 13:53:14.679973005 +0000 UTC m=+1308.677958052" Oct 03 13:53:15 crc kubenswrapper[4861]: I1003 13:53:15.689897 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-845d6d6f59-zlqcc" event={"ID":"db0287ff-e926-428b-a4e0-4dd1e3b40b66","Type":"ContainerStarted","Data":"6f940b57e91d29c6d4b325f3701d47acbf9d0f02f8d006df7ac7a1639566303c"} Oct 03 13:53:15 crc kubenswrapper[4861]: I1003 13:53:15.689967 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-845d6d6f59-zlqcc" Oct 03 13:53:15 crc kubenswrapper[4861]: I1003 13:53:15.890179 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-845d6d6f59-zlqcc" podStartSLOduration=4.8901587 podStartE2EDuration="4.8901587s" podCreationTimestamp="2025-10-03 13:53:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:53:15.724652138 +0000 UTC m=+1309.722637195" watchObservedRunningTime="2025-10-03 13:53:15.8901587 +0000 UTC m=+1309.888143747" Oct 03 13:53:15 crc kubenswrapper[4861]: I1003 13:53:15.901729 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 03 13:53:15 crc kubenswrapper[4861]: I1003 13:53:15.927591 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 03 13:53:19 crc kubenswrapper[4861]: I1003 13:53:19.767935 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"344b366c-9eb6-4732-be14-9d20dfd27336","Type":"ContainerStarted","Data":"352b16fb06d1d494c344a9f71ec0cdceb5346b7312091906be3855f6d1b95bd1"} Oct 03 13:53:19 crc kubenswrapper[4861]: I1003 13:53:19.768291 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="344b366c-9eb6-4732-be14-9d20dfd27336" containerName="nova-cell1-novncproxy-novncproxy" 
containerID="cri-o://352b16fb06d1d494c344a9f71ec0cdceb5346b7312091906be3855f6d1b95bd1" gracePeriod=30 Oct 03 13:53:19 crc kubenswrapper[4861]: I1003 13:53:19.770262 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"47fbe704-46a5-4313-ba09-06f613943f0a","Type":"ContainerStarted","Data":"15fbdfb8cd8c486df8149ea871d1f6788a26becfff3b98c8f1b9c9417ce30782"} Oct 03 13:53:19 crc kubenswrapper[4861]: I1003 13:53:19.770299 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"47fbe704-46a5-4313-ba09-06f613943f0a","Type":"ContainerStarted","Data":"b29e093bf1dd8a292ea6d261cc2bb25d42e65fe2422cc1c18af9c001e5153670"} Oct 03 13:53:19 crc kubenswrapper[4861]: I1003 13:53:19.774995 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"199e3032-23d3-4344-8e6c-67be90c68ea7","Type":"ContainerStarted","Data":"051fb49c91743c035e08b126319cf7359d9be5d54230c5004fc51e0c27ab17a7"} Oct 03 13:53:19 crc kubenswrapper[4861]: I1003 13:53:19.775086 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"199e3032-23d3-4344-8e6c-67be90c68ea7","Type":"ContainerStarted","Data":"de2e034a8f91b55ec6531dea88ca5c290cc948c797980c912abd2d558fa41440"} Oct 03 13:53:19 crc kubenswrapper[4861]: I1003 13:53:19.775301 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="199e3032-23d3-4344-8e6c-67be90c68ea7" containerName="nova-metadata-log" containerID="cri-o://de2e034a8f91b55ec6531dea88ca5c290cc948c797980c912abd2d558fa41440" gracePeriod=30 Oct 03 13:53:19 crc kubenswrapper[4861]: I1003 13:53:19.775463 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="199e3032-23d3-4344-8e6c-67be90c68ea7" containerName="nova-metadata-metadata" containerID="cri-o://051fb49c91743c035e08b126319cf7359d9be5d54230c5004fc51e0c27ab17a7" gracePeriod=30 Oct 03 13:53:19 crc kubenswrapper[4861]: I1003 13:53:19.779630 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"92e600f4-f5d6-445a-aa5c-493b1eec92c8","Type":"ContainerStarted","Data":"d9a5399a1bb07e35a0f067bbf0f871f8946b80266870335cda8addae6553c572"} Oct 03 13:53:19 crc kubenswrapper[4861]: I1003 13:53:19.802615 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.6694541259999998 podStartE2EDuration="7.802592015s" podCreationTimestamp="2025-10-03 13:53:12 +0000 UTC" firstStartedPulling="2025-10-03 13:53:13.598290151 +0000 UTC m=+1307.596275198" lastFinishedPulling="2025-10-03 13:53:18.73142804 +0000 UTC m=+1312.729413087" observedRunningTime="2025-10-03 13:53:19.795269171 +0000 UTC m=+1313.793254218" watchObservedRunningTime="2025-10-03 13:53:19.802592015 +0000 UTC m=+1313.800577062" Oct 03 13:53:19 crc kubenswrapper[4861]: I1003 13:53:19.829191 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=3.144107384 podStartE2EDuration="8.82916017s" podCreationTimestamp="2025-10-03 13:53:11 +0000 UTC" firstStartedPulling="2025-10-03 13:53:13.044841873 +0000 UTC m=+1307.042826920" lastFinishedPulling="2025-10-03 13:53:18.729894659 +0000 UTC m=+1312.727879706" observedRunningTime="2025-10-03 13:53:19.820915922 +0000 UTC m=+1313.818900969" watchObservedRunningTime="2025-10-03 13:53:19.82916017 +0000 UTC m=+1313.827145217" Oct 
03 13:53:19 crc kubenswrapper[4861]: I1003 13:53:19.847128 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=3.3628889109999998 podStartE2EDuration="8.847104897s" podCreationTimestamp="2025-10-03 13:53:11 +0000 UTC" firstStartedPulling="2025-10-03 13:53:13.251650211 +0000 UTC m=+1307.249635258" lastFinishedPulling="2025-10-03 13:53:18.735866187 +0000 UTC m=+1312.733851244" observedRunningTime="2025-10-03 13:53:19.844918838 +0000 UTC m=+1313.842903885" watchObservedRunningTime="2025-10-03 13:53:19.847104897 +0000 UTC m=+1313.845089944" Oct 03 13:53:19 crc kubenswrapper[4861]: I1003 13:53:19.871637 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=3.466932802 podStartE2EDuration="8.871618697s" podCreationTimestamp="2025-10-03 13:53:11 +0000 UTC" firstStartedPulling="2025-10-03 13:53:13.324019732 +0000 UTC m=+1307.322004779" lastFinishedPulling="2025-10-03 13:53:18.728705627 +0000 UTC m=+1312.726690674" observedRunningTime="2025-10-03 13:53:19.869599954 +0000 UTC m=+1313.867585001" watchObservedRunningTime="2025-10-03 13:53:19.871618697 +0000 UTC m=+1313.869603744" Oct 03 13:53:20 crc kubenswrapper[4861]: I1003 13:53:20.518331 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 03 13:53:20 crc kubenswrapper[4861]: I1003 13:53:20.605348 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qkphj\" (UniqueName: \"kubernetes.io/projected/199e3032-23d3-4344-8e6c-67be90c68ea7-kube-api-access-qkphj\") pod \"199e3032-23d3-4344-8e6c-67be90c68ea7\" (UID: \"199e3032-23d3-4344-8e6c-67be90c68ea7\") " Oct 03 13:53:20 crc kubenswrapper[4861]: I1003 13:53:20.605466 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/199e3032-23d3-4344-8e6c-67be90c68ea7-combined-ca-bundle\") pod \"199e3032-23d3-4344-8e6c-67be90c68ea7\" (UID: \"199e3032-23d3-4344-8e6c-67be90c68ea7\") " Oct 03 13:53:20 crc kubenswrapper[4861]: I1003 13:53:20.605550 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/199e3032-23d3-4344-8e6c-67be90c68ea7-logs\") pod \"199e3032-23d3-4344-8e6c-67be90c68ea7\" (UID: \"199e3032-23d3-4344-8e6c-67be90c68ea7\") " Oct 03 13:53:20 crc kubenswrapper[4861]: I1003 13:53:20.605610 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/199e3032-23d3-4344-8e6c-67be90c68ea7-config-data\") pod \"199e3032-23d3-4344-8e6c-67be90c68ea7\" (UID: \"199e3032-23d3-4344-8e6c-67be90c68ea7\") " Oct 03 13:53:20 crc kubenswrapper[4861]: I1003 13:53:20.606519 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/199e3032-23d3-4344-8e6c-67be90c68ea7-logs" (OuterVolumeSpecName: "logs") pod "199e3032-23d3-4344-8e6c-67be90c68ea7" (UID: "199e3032-23d3-4344-8e6c-67be90c68ea7"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:53:20 crc kubenswrapper[4861]: I1003 13:53:20.615491 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/199e3032-23d3-4344-8e6c-67be90c68ea7-kube-api-access-qkphj" (OuterVolumeSpecName: "kube-api-access-qkphj") pod "199e3032-23d3-4344-8e6c-67be90c68ea7" (UID: "199e3032-23d3-4344-8e6c-67be90c68ea7"). InnerVolumeSpecName "kube-api-access-qkphj". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:53:20 crc kubenswrapper[4861]: I1003 13:53:20.641162 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/199e3032-23d3-4344-8e6c-67be90c68ea7-config-data" (OuterVolumeSpecName: "config-data") pod "199e3032-23d3-4344-8e6c-67be90c68ea7" (UID: "199e3032-23d3-4344-8e6c-67be90c68ea7"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:53:20 crc kubenswrapper[4861]: I1003 13:53:20.643532 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/199e3032-23d3-4344-8e6c-67be90c68ea7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "199e3032-23d3-4344-8e6c-67be90c68ea7" (UID: "199e3032-23d3-4344-8e6c-67be90c68ea7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:53:20 crc kubenswrapper[4861]: I1003 13:53:20.721330 4861 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/199e3032-23d3-4344-8e6c-67be90c68ea7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 13:53:20 crc kubenswrapper[4861]: I1003 13:53:20.721368 4861 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/199e3032-23d3-4344-8e6c-67be90c68ea7-logs\") on node \"crc\" DevicePath \"\"" Oct 03 13:53:20 crc kubenswrapper[4861]: I1003 13:53:20.721378 4861 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/199e3032-23d3-4344-8e6c-67be90c68ea7-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 13:53:20 crc kubenswrapper[4861]: I1003 13:53:20.721391 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qkphj\" (UniqueName: \"kubernetes.io/projected/199e3032-23d3-4344-8e6c-67be90c68ea7-kube-api-access-qkphj\") on node \"crc\" DevicePath \"\"" Oct 03 13:53:20 crc kubenswrapper[4861]: I1003 13:53:20.791091 4861 generic.go:334] "Generic (PLEG): container finished" podID="199e3032-23d3-4344-8e6c-67be90c68ea7" containerID="051fb49c91743c035e08b126319cf7359d9be5d54230c5004fc51e0c27ab17a7" exitCode=0 Oct 03 13:53:20 crc kubenswrapper[4861]: I1003 13:53:20.791315 4861 generic.go:334] "Generic (PLEG): container finished" podID="199e3032-23d3-4344-8e6c-67be90c68ea7" containerID="de2e034a8f91b55ec6531dea88ca5c290cc948c797980c912abd2d558fa41440" exitCode=143 Oct 03 13:53:20 crc kubenswrapper[4861]: I1003 13:53:20.792353 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Oct 03 13:53:20 crc kubenswrapper[4861]: I1003 13:53:20.793108 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"199e3032-23d3-4344-8e6c-67be90c68ea7","Type":"ContainerDied","Data":"051fb49c91743c035e08b126319cf7359d9be5d54230c5004fc51e0c27ab17a7"} Oct 03 13:53:20 crc kubenswrapper[4861]: I1003 13:53:20.793155 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"199e3032-23d3-4344-8e6c-67be90c68ea7","Type":"ContainerDied","Data":"de2e034a8f91b55ec6531dea88ca5c290cc948c797980c912abd2d558fa41440"} Oct 03 13:53:20 crc kubenswrapper[4861]: I1003 13:53:20.793173 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"199e3032-23d3-4344-8e6c-67be90c68ea7","Type":"ContainerDied","Data":"282d99ce2da2e1b5945fd4a7d82331a3a8893be2bd17fb370b7c9a9911c9a6ec"} Oct 03 13:53:20 crc kubenswrapper[4861]: I1003 13:53:20.793192 4861 scope.go:117] "RemoveContainer" containerID="051fb49c91743c035e08b126319cf7359d9be5d54230c5004fc51e0c27ab17a7" Oct 03 13:53:20 crc kubenswrapper[4861]: I1003 13:53:20.829409 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 03 13:53:20 crc kubenswrapper[4861]: I1003 13:53:20.840065 4861 scope.go:117] "RemoveContainer" containerID="de2e034a8f91b55ec6531dea88ca5c290cc948c797980c912abd2d558fa41440" Oct 03 13:53:20 crc kubenswrapper[4861]: I1003 13:53:20.857862 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Oct 03 13:53:20 crc kubenswrapper[4861]: I1003 13:53:20.891160 4861 scope.go:117] "RemoveContainer" containerID="051fb49c91743c035e08b126319cf7359d9be5d54230c5004fc51e0c27ab17a7" Oct 03 13:53:20 crc kubenswrapper[4861]: E1003 13:53:20.892687 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"051fb49c91743c035e08b126319cf7359d9be5d54230c5004fc51e0c27ab17a7\": container with ID starting with 051fb49c91743c035e08b126319cf7359d9be5d54230c5004fc51e0c27ab17a7 not found: ID does not exist" containerID="051fb49c91743c035e08b126319cf7359d9be5d54230c5004fc51e0c27ab17a7" Oct 03 13:53:20 crc kubenswrapper[4861]: I1003 13:53:20.892714 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"051fb49c91743c035e08b126319cf7359d9be5d54230c5004fc51e0c27ab17a7"} err="failed to get container status \"051fb49c91743c035e08b126319cf7359d9be5d54230c5004fc51e0c27ab17a7\": rpc error: code = NotFound desc = could not find container \"051fb49c91743c035e08b126319cf7359d9be5d54230c5004fc51e0c27ab17a7\": container with ID starting with 051fb49c91743c035e08b126319cf7359d9be5d54230c5004fc51e0c27ab17a7 not found: ID does not exist" Oct 03 13:53:20 crc kubenswrapper[4861]: I1003 13:53:20.892733 4861 scope.go:117] "RemoveContainer" containerID="de2e034a8f91b55ec6531dea88ca5c290cc948c797980c912abd2d558fa41440" Oct 03 13:53:20 crc kubenswrapper[4861]: E1003 13:53:20.894439 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"de2e034a8f91b55ec6531dea88ca5c290cc948c797980c912abd2d558fa41440\": container with ID starting with de2e034a8f91b55ec6531dea88ca5c290cc948c797980c912abd2d558fa41440 not found: ID does not exist" containerID="de2e034a8f91b55ec6531dea88ca5c290cc948c797980c912abd2d558fa41440" Oct 03 13:53:20 crc kubenswrapper[4861]: I1003 
13:53:20.894491 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"de2e034a8f91b55ec6531dea88ca5c290cc948c797980c912abd2d558fa41440"} err="failed to get container status \"de2e034a8f91b55ec6531dea88ca5c290cc948c797980c912abd2d558fa41440\": rpc error: code = NotFound desc = could not find container \"de2e034a8f91b55ec6531dea88ca5c290cc948c797980c912abd2d558fa41440\": container with ID starting with de2e034a8f91b55ec6531dea88ca5c290cc948c797980c912abd2d558fa41440 not found: ID does not exist" Oct 03 13:53:20 crc kubenswrapper[4861]: I1003 13:53:20.894528 4861 scope.go:117] "RemoveContainer" containerID="051fb49c91743c035e08b126319cf7359d9be5d54230c5004fc51e0c27ab17a7" Oct 03 13:53:20 crc kubenswrapper[4861]: I1003 13:53:20.901290 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"051fb49c91743c035e08b126319cf7359d9be5d54230c5004fc51e0c27ab17a7"} err="failed to get container status \"051fb49c91743c035e08b126319cf7359d9be5d54230c5004fc51e0c27ab17a7\": rpc error: code = NotFound desc = could not find container \"051fb49c91743c035e08b126319cf7359d9be5d54230c5004fc51e0c27ab17a7\": container with ID starting with 051fb49c91743c035e08b126319cf7359d9be5d54230c5004fc51e0c27ab17a7 not found: ID does not exist" Oct 03 13:53:20 crc kubenswrapper[4861]: I1003 13:53:20.901335 4861 scope.go:117] "RemoveContainer" containerID="de2e034a8f91b55ec6531dea88ca5c290cc948c797980c912abd2d558fa41440" Oct 03 13:53:20 crc kubenswrapper[4861]: I1003 13:53:20.901741 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"de2e034a8f91b55ec6531dea88ca5c290cc948c797980c912abd2d558fa41440"} err="failed to get container status \"de2e034a8f91b55ec6531dea88ca5c290cc948c797980c912abd2d558fa41440\": rpc error: code = NotFound desc = could not find container \"de2e034a8f91b55ec6531dea88ca5c290cc948c797980c912abd2d558fa41440\": container with ID starting with de2e034a8f91b55ec6531dea88ca5c290cc948c797980c912abd2d558fa41440 not found: ID does not exist" Oct 03 13:53:20 crc kubenswrapper[4861]: I1003 13:53:20.908044 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Oct 03 13:53:20 crc kubenswrapper[4861]: E1003 13:53:20.908505 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="199e3032-23d3-4344-8e6c-67be90c68ea7" containerName="nova-metadata-log" Oct 03 13:53:20 crc kubenswrapper[4861]: I1003 13:53:20.908528 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="199e3032-23d3-4344-8e6c-67be90c68ea7" containerName="nova-metadata-log" Oct 03 13:53:20 crc kubenswrapper[4861]: E1003 13:53:20.908542 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="199e3032-23d3-4344-8e6c-67be90c68ea7" containerName="nova-metadata-metadata" Oct 03 13:53:20 crc kubenswrapper[4861]: I1003 13:53:20.908551 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="199e3032-23d3-4344-8e6c-67be90c68ea7" containerName="nova-metadata-metadata" Oct 03 13:53:20 crc kubenswrapper[4861]: I1003 13:53:20.908777 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="199e3032-23d3-4344-8e6c-67be90c68ea7" containerName="nova-metadata-metadata" Oct 03 13:53:20 crc kubenswrapper[4861]: I1003 13:53:20.908796 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="199e3032-23d3-4344-8e6c-67be90c68ea7" containerName="nova-metadata-log" Oct 03 13:53:20 crc kubenswrapper[4861]: I1003 13:53:20.909718 4861 util.go:30] "No 
sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 03 13:53:20 crc kubenswrapper[4861]: I1003 13:53:20.911974 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Oct 03 13:53:20 crc kubenswrapper[4861]: I1003 13:53:20.912258 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Oct 03 13:53:20 crc kubenswrapper[4861]: I1003 13:53:20.929420 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 03 13:53:21 crc kubenswrapper[4861]: I1003 13:53:21.031593 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zgsj6\" (UniqueName: \"kubernetes.io/projected/0d14ab87-b6a3-4c72-b82c-6accc45b650f-kube-api-access-zgsj6\") pod \"nova-metadata-0\" (UID: \"0d14ab87-b6a3-4c72-b82c-6accc45b650f\") " pod="openstack/nova-metadata-0" Oct 03 13:53:21 crc kubenswrapper[4861]: I1003 13:53:21.031698 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0d14ab87-b6a3-4c72-b82c-6accc45b650f-logs\") pod \"nova-metadata-0\" (UID: \"0d14ab87-b6a3-4c72-b82c-6accc45b650f\") " pod="openstack/nova-metadata-0" Oct 03 13:53:21 crc kubenswrapper[4861]: I1003 13:53:21.031825 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d14ab87-b6a3-4c72-b82c-6accc45b650f-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"0d14ab87-b6a3-4c72-b82c-6accc45b650f\") " pod="openstack/nova-metadata-0" Oct 03 13:53:21 crc kubenswrapper[4861]: I1003 13:53:21.031854 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d14ab87-b6a3-4c72-b82c-6accc45b650f-config-data\") pod \"nova-metadata-0\" (UID: \"0d14ab87-b6a3-4c72-b82c-6accc45b650f\") " pod="openstack/nova-metadata-0" Oct 03 13:53:21 crc kubenswrapper[4861]: I1003 13:53:21.031880 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/0d14ab87-b6a3-4c72-b82c-6accc45b650f-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"0d14ab87-b6a3-4c72-b82c-6accc45b650f\") " pod="openstack/nova-metadata-0" Oct 03 13:53:21 crc kubenswrapper[4861]: I1003 13:53:21.134122 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d14ab87-b6a3-4c72-b82c-6accc45b650f-config-data\") pod \"nova-metadata-0\" (UID: \"0d14ab87-b6a3-4c72-b82c-6accc45b650f\") " pod="openstack/nova-metadata-0" Oct 03 13:53:21 crc kubenswrapper[4861]: I1003 13:53:21.134183 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/0d14ab87-b6a3-4c72-b82c-6accc45b650f-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"0d14ab87-b6a3-4c72-b82c-6accc45b650f\") " pod="openstack/nova-metadata-0" Oct 03 13:53:21 crc kubenswrapper[4861]: I1003 13:53:21.134258 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zgsj6\" (UniqueName: \"kubernetes.io/projected/0d14ab87-b6a3-4c72-b82c-6accc45b650f-kube-api-access-zgsj6\") pod \"nova-metadata-0\" (UID: 
\"0d14ab87-b6a3-4c72-b82c-6accc45b650f\") " pod="openstack/nova-metadata-0" Oct 03 13:53:21 crc kubenswrapper[4861]: I1003 13:53:21.134296 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0d14ab87-b6a3-4c72-b82c-6accc45b650f-logs\") pod \"nova-metadata-0\" (UID: \"0d14ab87-b6a3-4c72-b82c-6accc45b650f\") " pod="openstack/nova-metadata-0" Oct 03 13:53:21 crc kubenswrapper[4861]: I1003 13:53:21.134374 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d14ab87-b6a3-4c72-b82c-6accc45b650f-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"0d14ab87-b6a3-4c72-b82c-6accc45b650f\") " pod="openstack/nova-metadata-0" Oct 03 13:53:21 crc kubenswrapper[4861]: I1003 13:53:21.135340 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0d14ab87-b6a3-4c72-b82c-6accc45b650f-logs\") pod \"nova-metadata-0\" (UID: \"0d14ab87-b6a3-4c72-b82c-6accc45b650f\") " pod="openstack/nova-metadata-0" Oct 03 13:53:21 crc kubenswrapper[4861]: I1003 13:53:21.145087 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d14ab87-b6a3-4c72-b82c-6accc45b650f-config-data\") pod \"nova-metadata-0\" (UID: \"0d14ab87-b6a3-4c72-b82c-6accc45b650f\") " pod="openstack/nova-metadata-0" Oct 03 13:53:21 crc kubenswrapper[4861]: I1003 13:53:21.146346 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d14ab87-b6a3-4c72-b82c-6accc45b650f-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"0d14ab87-b6a3-4c72-b82c-6accc45b650f\") " pod="openstack/nova-metadata-0" Oct 03 13:53:21 crc kubenswrapper[4861]: I1003 13:53:21.159212 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/0d14ab87-b6a3-4c72-b82c-6accc45b650f-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"0d14ab87-b6a3-4c72-b82c-6accc45b650f\") " pod="openstack/nova-metadata-0" Oct 03 13:53:21 crc kubenswrapper[4861]: I1003 13:53:21.163669 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zgsj6\" (UniqueName: \"kubernetes.io/projected/0d14ab87-b6a3-4c72-b82c-6accc45b650f-kube-api-access-zgsj6\") pod \"nova-metadata-0\" (UID: \"0d14ab87-b6a3-4c72-b82c-6accc45b650f\") " pod="openstack/nova-metadata-0" Oct 03 13:53:21 crc kubenswrapper[4861]: I1003 13:53:21.231172 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Oct 03 13:53:21 crc kubenswrapper[4861]: I1003 13:53:21.737885 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 03 13:53:21 crc kubenswrapper[4861]: I1003 13:53:21.802856 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"0d14ab87-b6a3-4c72-b82c-6accc45b650f","Type":"ContainerStarted","Data":"32ce09aea0226ec3f14042b108465468c8a90c923fc2f6ed1d159fc5070f1a2f"} Oct 03 13:53:22 crc kubenswrapper[4861]: I1003 13:53:22.293435 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Oct 03 13:53:22 crc kubenswrapper[4861]: I1003 13:53:22.293763 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Oct 03 13:53:22 crc kubenswrapper[4861]: I1003 13:53:22.329979 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Oct 03 13:53:22 crc kubenswrapper[4861]: I1003 13:53:22.474188 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 03 13:53:22 crc kubenswrapper[4861]: I1003 13:53:22.475422 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 03 13:53:22 crc kubenswrapper[4861]: I1003 13:53:22.524409 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-845d6d6f59-zlqcc" Oct 03 13:53:22 crc kubenswrapper[4861]: I1003 13:53:22.617621 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5784cf869f-gd9mp"] Oct 03 13:53:22 crc kubenswrapper[4861]: I1003 13:53:22.617843 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5784cf869f-gd9mp" podUID="709ebd79-e1bf-4e30-ba04-d1dc2c954398" containerName="dnsmasq-dns" containerID="cri-o://79be2c834c62a38f57e7a14bc42bb562338318719b58c3ab42257952f4c464a8" gracePeriod=10 Oct 03 13:53:22 crc kubenswrapper[4861]: I1003 13:53:22.673006 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Oct 03 13:53:22 crc kubenswrapper[4861]: I1003 13:53:22.693501 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="199e3032-23d3-4344-8e6c-67be90c68ea7" path="/var/lib/kubelet/pods/199e3032-23d3-4344-8e6c-67be90c68ea7/volumes" Oct 03 13:53:22 crc kubenswrapper[4861]: I1003 13:53:22.849625 4861 generic.go:334] "Generic (PLEG): container finished" podID="709ebd79-e1bf-4e30-ba04-d1dc2c954398" containerID="79be2c834c62a38f57e7a14bc42bb562338318719b58c3ab42257952f4c464a8" exitCode=0 Oct 03 13:53:22 crc kubenswrapper[4861]: I1003 13:53:22.849773 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5784cf869f-gd9mp" event={"ID":"709ebd79-e1bf-4e30-ba04-d1dc2c954398","Type":"ContainerDied","Data":"79be2c834c62a38f57e7a14bc42bb562338318719b58c3ab42257952f4c464a8"} Oct 03 13:53:22 crc kubenswrapper[4861]: I1003 13:53:22.856451 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"0d14ab87-b6a3-4c72-b82c-6accc45b650f","Type":"ContainerStarted","Data":"776efb223961fd07e7b9a3fdcb3b7cf5d40b2ae38ef8966ee61f89a34dc8659d"} Oct 03 13:53:22 crc kubenswrapper[4861]: I1003 13:53:22.856501 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" 
event={"ID":"0d14ab87-b6a3-4c72-b82c-6accc45b650f","Type":"ContainerStarted","Data":"8e13995ba47ed22d80c04d85499925db68f1fbec17a50dcf1fe7d313fd19f790"} Oct 03 13:53:22 crc kubenswrapper[4861]: I1003 13:53:22.882085 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.882062625 podStartE2EDuration="2.882062625s" podCreationTimestamp="2025-10-03 13:53:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:53:22.87995382 +0000 UTC m=+1316.877938867" watchObservedRunningTime="2025-10-03 13:53:22.882062625 +0000 UTC m=+1316.880047692" Oct 03 13:53:22 crc kubenswrapper[4861]: I1003 13:53:22.923647 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Oct 03 13:53:23 crc kubenswrapper[4861]: I1003 13:53:23.557498 4861 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="47fbe704-46a5-4313-ba09-06f613943f0a" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.185:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 03 13:53:23 crc kubenswrapper[4861]: I1003 13:53:23.557516 4861 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="47fbe704-46a5-4313-ba09-06f613943f0a" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.185:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 03 13:53:23 crc kubenswrapper[4861]: I1003 13:53:23.749724 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5784cf869f-gd9mp" Oct 03 13:53:23 crc kubenswrapper[4861]: I1003 13:53:23.817204 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Oct 03 13:53:23 crc kubenswrapper[4861]: I1003 13:53:23.892964 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5784cf869f-gd9mp" Oct 03 13:53:23 crc kubenswrapper[4861]: I1003 13:53:23.893371 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5784cf869f-gd9mp" event={"ID":"709ebd79-e1bf-4e30-ba04-d1dc2c954398","Type":"ContainerDied","Data":"495ec299cccb0683c3ce5b0c2b4461d4239ac3525e6d6fbd0d35965f4608fdcd"} Oct 03 13:53:23 crc kubenswrapper[4861]: I1003 13:53:23.893403 4861 scope.go:117] "RemoveContainer" containerID="79be2c834c62a38f57e7a14bc42bb562338318719b58c3ab42257952f4c464a8" Oct 03 13:53:23 crc kubenswrapper[4861]: I1003 13:53:23.910208 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/709ebd79-e1bf-4e30-ba04-d1dc2c954398-config\") pod \"709ebd79-e1bf-4e30-ba04-d1dc2c954398\" (UID: \"709ebd79-e1bf-4e30-ba04-d1dc2c954398\") " Oct 03 13:53:23 crc kubenswrapper[4861]: I1003 13:53:23.910357 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/709ebd79-e1bf-4e30-ba04-d1dc2c954398-dns-svc\") pod \"709ebd79-e1bf-4e30-ba04-d1dc2c954398\" (UID: \"709ebd79-e1bf-4e30-ba04-d1dc2c954398\") " Oct 03 13:53:23 crc kubenswrapper[4861]: I1003 13:53:23.910381 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/709ebd79-e1bf-4e30-ba04-d1dc2c954398-ovsdbserver-nb\") pod \"709ebd79-e1bf-4e30-ba04-d1dc2c954398\" (UID: \"709ebd79-e1bf-4e30-ba04-d1dc2c954398\") " Oct 03 13:53:23 crc kubenswrapper[4861]: I1003 13:53:23.910459 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pl2sm\" (UniqueName: \"kubernetes.io/projected/709ebd79-e1bf-4e30-ba04-d1dc2c954398-kube-api-access-pl2sm\") pod \"709ebd79-e1bf-4e30-ba04-d1dc2c954398\" (UID: \"709ebd79-e1bf-4e30-ba04-d1dc2c954398\") " Oct 03 13:53:23 crc kubenswrapper[4861]: I1003 13:53:23.910583 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/709ebd79-e1bf-4e30-ba04-d1dc2c954398-dns-swift-storage-0\") pod \"709ebd79-e1bf-4e30-ba04-d1dc2c954398\" (UID: \"709ebd79-e1bf-4e30-ba04-d1dc2c954398\") " Oct 03 13:53:23 crc kubenswrapper[4861]: I1003 13:53:23.910673 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/709ebd79-e1bf-4e30-ba04-d1dc2c954398-ovsdbserver-sb\") pod \"709ebd79-e1bf-4e30-ba04-d1dc2c954398\" (UID: \"709ebd79-e1bf-4e30-ba04-d1dc2c954398\") " Oct 03 13:53:23 crc kubenswrapper[4861]: I1003 13:53:23.977432 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/709ebd79-e1bf-4e30-ba04-d1dc2c954398-kube-api-access-pl2sm" (OuterVolumeSpecName: "kube-api-access-pl2sm") pod "709ebd79-e1bf-4e30-ba04-d1dc2c954398" (UID: "709ebd79-e1bf-4e30-ba04-d1dc2c954398"). InnerVolumeSpecName "kube-api-access-pl2sm". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:53:24 crc kubenswrapper[4861]: I1003 13:53:24.041043 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pl2sm\" (UniqueName: \"kubernetes.io/projected/709ebd79-e1bf-4e30-ba04-d1dc2c954398-kube-api-access-pl2sm\") on node \"crc\" DevicePath \"\"" Oct 03 13:53:24 crc kubenswrapper[4861]: I1003 13:53:24.042495 4861 scope.go:117] "RemoveContainer" containerID="f1ccb5eedcb0d742d711b0df447cffb0ec2e1d8751a00163510ee76b1567fb11" Oct 03 13:53:24 crc kubenswrapper[4861]: I1003 13:53:24.047297 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/709ebd79-e1bf-4e30-ba04-d1dc2c954398-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "709ebd79-e1bf-4e30-ba04-d1dc2c954398" (UID: "709ebd79-e1bf-4e30-ba04-d1dc2c954398"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:53:24 crc kubenswrapper[4861]: I1003 13:53:24.089157 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/709ebd79-e1bf-4e30-ba04-d1dc2c954398-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "709ebd79-e1bf-4e30-ba04-d1dc2c954398" (UID: "709ebd79-e1bf-4e30-ba04-d1dc2c954398"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:53:24 crc kubenswrapper[4861]: I1003 13:53:24.147786 4861 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/709ebd79-e1bf-4e30-ba04-d1dc2c954398-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 03 13:53:24 crc kubenswrapper[4861]: I1003 13:53:24.147828 4861 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/709ebd79-e1bf-4e30-ba04-d1dc2c954398-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 03 13:53:24 crc kubenswrapper[4861]: I1003 13:53:24.206063 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/709ebd79-e1bf-4e30-ba04-d1dc2c954398-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "709ebd79-e1bf-4e30-ba04-d1dc2c954398" (UID: "709ebd79-e1bf-4e30-ba04-d1dc2c954398"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:53:24 crc kubenswrapper[4861]: I1003 13:53:24.239794 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/709ebd79-e1bf-4e30-ba04-d1dc2c954398-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "709ebd79-e1bf-4e30-ba04-d1dc2c954398" (UID: "709ebd79-e1bf-4e30-ba04-d1dc2c954398"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:53:24 crc kubenswrapper[4861]: I1003 13:53:24.252556 4861 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/709ebd79-e1bf-4e30-ba04-d1dc2c954398-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 03 13:53:24 crc kubenswrapper[4861]: I1003 13:53:24.252599 4861 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/709ebd79-e1bf-4e30-ba04-d1dc2c954398-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 03 13:53:24 crc kubenswrapper[4861]: I1003 13:53:24.263881 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/709ebd79-e1bf-4e30-ba04-d1dc2c954398-config" (OuterVolumeSpecName: "config") pod "709ebd79-e1bf-4e30-ba04-d1dc2c954398" (UID: "709ebd79-e1bf-4e30-ba04-d1dc2c954398"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:53:24 crc kubenswrapper[4861]: I1003 13:53:24.353853 4861 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/709ebd79-e1bf-4e30-ba04-d1dc2c954398-config\") on node \"crc\" DevicePath \"\"" Oct 03 13:53:24 crc kubenswrapper[4861]: I1003 13:53:24.522348 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5784cf869f-gd9mp"] Oct 03 13:53:24 crc kubenswrapper[4861]: I1003 13:53:24.531974 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5784cf869f-gd9mp"] Oct 03 13:53:24 crc kubenswrapper[4861]: I1003 13:53:24.693628 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="709ebd79-e1bf-4e30-ba04-d1dc2c954398" path="/var/lib/kubelet/pods/709ebd79-e1bf-4e30-ba04-d1dc2c954398/volumes" Oct 03 13:53:24 crc kubenswrapper[4861]: I1003 13:53:24.903603 4861 generic.go:334] "Generic (PLEG): container finished" podID="0acf7c1b-380a-44b8-8542-6610bbc1f700" containerID="b52a454d127d124d10680549c4755bea60436c80787cda5e54c997dd4d6f67e5" exitCode=0 Oct 03 13:53:24 crc kubenswrapper[4861]: I1003 13:53:24.903647 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-ngd8x" event={"ID":"0acf7c1b-380a-44b8-8542-6610bbc1f700","Type":"ContainerDied","Data":"b52a454d127d124d10680549c4755bea60436c80787cda5e54c997dd4d6f67e5"} Oct 03 13:53:25 crc kubenswrapper[4861]: I1003 13:53:25.915022 4861 generic.go:334] "Generic (PLEG): container finished" podID="f57ff497-fc3f-4872-b1e3-3927895d7c6c" containerID="9daf7e884343bfddc03defa9cfeb515f90dae102ec52f788ed18250bb430cb2a" exitCode=0 Oct 03 13:53:25 crc kubenswrapper[4861]: I1003 13:53:25.915115 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-hgvnj" event={"ID":"f57ff497-fc3f-4872-b1e3-3927895d7c6c","Type":"ContainerDied","Data":"9daf7e884343bfddc03defa9cfeb515f90dae102ec52f788ed18250bb430cb2a"} Oct 03 13:53:26 crc kubenswrapper[4861]: I1003 13:53:26.231721 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 03 13:53:26 crc kubenswrapper[4861]: I1003 13:53:26.231983 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 03 13:53:26 crc kubenswrapper[4861]: I1003 13:53:26.339785 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-ngd8x" Oct 03 13:53:26 crc kubenswrapper[4861]: I1003 13:53:26.491514 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0acf7c1b-380a-44b8-8542-6610bbc1f700-scripts\") pod \"0acf7c1b-380a-44b8-8542-6610bbc1f700\" (UID: \"0acf7c1b-380a-44b8-8542-6610bbc1f700\") " Oct 03 13:53:26 crc kubenswrapper[4861]: I1003 13:53:26.492026 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zxjt8\" (UniqueName: \"kubernetes.io/projected/0acf7c1b-380a-44b8-8542-6610bbc1f700-kube-api-access-zxjt8\") pod \"0acf7c1b-380a-44b8-8542-6610bbc1f700\" (UID: \"0acf7c1b-380a-44b8-8542-6610bbc1f700\") " Oct 03 13:53:26 crc kubenswrapper[4861]: I1003 13:53:26.492167 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0acf7c1b-380a-44b8-8542-6610bbc1f700-config-data\") pod \"0acf7c1b-380a-44b8-8542-6610bbc1f700\" (UID: \"0acf7c1b-380a-44b8-8542-6610bbc1f700\") " Oct 03 13:53:26 crc kubenswrapper[4861]: I1003 13:53:26.492592 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0acf7c1b-380a-44b8-8542-6610bbc1f700-combined-ca-bundle\") pod \"0acf7c1b-380a-44b8-8542-6610bbc1f700\" (UID: \"0acf7c1b-380a-44b8-8542-6610bbc1f700\") " Oct 03 13:53:26 crc kubenswrapper[4861]: I1003 13:53:26.496548 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0acf7c1b-380a-44b8-8542-6610bbc1f700-kube-api-access-zxjt8" (OuterVolumeSpecName: "kube-api-access-zxjt8") pod "0acf7c1b-380a-44b8-8542-6610bbc1f700" (UID: "0acf7c1b-380a-44b8-8542-6610bbc1f700"). InnerVolumeSpecName "kube-api-access-zxjt8". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:53:26 crc kubenswrapper[4861]: I1003 13:53:26.501971 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0acf7c1b-380a-44b8-8542-6610bbc1f700-scripts" (OuterVolumeSpecName: "scripts") pod "0acf7c1b-380a-44b8-8542-6610bbc1f700" (UID: "0acf7c1b-380a-44b8-8542-6610bbc1f700"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:53:26 crc kubenswrapper[4861]: I1003 13:53:26.521265 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0acf7c1b-380a-44b8-8542-6610bbc1f700-config-data" (OuterVolumeSpecName: "config-data") pod "0acf7c1b-380a-44b8-8542-6610bbc1f700" (UID: "0acf7c1b-380a-44b8-8542-6610bbc1f700"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:53:26 crc kubenswrapper[4861]: I1003 13:53:26.540619 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0acf7c1b-380a-44b8-8542-6610bbc1f700-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0acf7c1b-380a-44b8-8542-6610bbc1f700" (UID: "0acf7c1b-380a-44b8-8542-6610bbc1f700"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:53:26 crc kubenswrapper[4861]: I1003 13:53:26.595166 4861 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0acf7c1b-380a-44b8-8542-6610bbc1f700-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 13:53:26 crc kubenswrapper[4861]: I1003 13:53:26.595193 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zxjt8\" (UniqueName: \"kubernetes.io/projected/0acf7c1b-380a-44b8-8542-6610bbc1f700-kube-api-access-zxjt8\") on node \"crc\" DevicePath \"\"" Oct 03 13:53:26 crc kubenswrapper[4861]: I1003 13:53:26.595205 4861 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0acf7c1b-380a-44b8-8542-6610bbc1f700-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 13:53:26 crc kubenswrapper[4861]: I1003 13:53:26.595214 4861 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0acf7c1b-380a-44b8-8542-6610bbc1f700-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 13:53:26 crc kubenswrapper[4861]: I1003 13:53:26.925186 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-ngd8x" event={"ID":"0acf7c1b-380a-44b8-8542-6610bbc1f700","Type":"ContainerDied","Data":"31f3850b270ac0e3804daf17386e8413c9d35deca3a111ed6fc3993e423c00e5"} Oct 03 13:53:26 crc kubenswrapper[4861]: I1003 13:53:26.925627 4861 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="31f3850b270ac0e3804daf17386e8413c9d35deca3a111ed6fc3993e423c00e5" Oct 03 13:53:26 crc kubenswrapper[4861]: I1003 13:53:26.925218 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-ngd8x" Oct 03 13:53:27 crc kubenswrapper[4861]: I1003 13:53:27.079434 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Oct 03 13:53:27 crc kubenswrapper[4861]: I1003 13:53:27.079639 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="92e600f4-f5d6-445a-aa5c-493b1eec92c8" containerName="nova-scheduler-scheduler" containerID="cri-o://d9a5399a1bb07e35a0f067bbf0f871f8946b80266870335cda8addae6553c572" gracePeriod=30 Oct 03 13:53:27 crc kubenswrapper[4861]: I1003 13:53:27.102134 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 03 13:53:27 crc kubenswrapper[4861]: I1003 13:53:27.102516 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="47fbe704-46a5-4313-ba09-06f613943f0a" containerName="nova-api-log" containerID="cri-o://b29e093bf1dd8a292ea6d261cc2bb25d42e65fe2422cc1c18af9c001e5153670" gracePeriod=30 Oct 03 13:53:27 crc kubenswrapper[4861]: I1003 13:53:27.102571 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="47fbe704-46a5-4313-ba09-06f613943f0a" containerName="nova-api-api" containerID="cri-o://15fbdfb8cd8c486df8149ea871d1f6788a26becfff3b98c8f1b9c9417ce30782" gracePeriod=30 Oct 03 13:53:27 crc kubenswrapper[4861]: I1003 13:53:27.112450 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 03 13:53:27 crc kubenswrapper[4861]: I1003 13:53:27.112916 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="0d14ab87-b6a3-4c72-b82c-6accc45b650f" 
containerName="nova-metadata-log" containerID="cri-o://8e13995ba47ed22d80c04d85499925db68f1fbec17a50dcf1fe7d313fd19f790" gracePeriod=30 Oct 03 13:53:27 crc kubenswrapper[4861]: I1003 13:53:27.113202 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="0d14ab87-b6a3-4c72-b82c-6accc45b650f" containerName="nova-metadata-metadata" containerID="cri-o://776efb223961fd07e7b9a3fdcb3b7cf5d40b2ae38ef8966ee61f89a34dc8659d" gracePeriod=30 Oct 03 13:53:27 crc kubenswrapper[4861]: E1003 13:53:27.296461 4861 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d9a5399a1bb07e35a0f067bbf0f871f8946b80266870335cda8addae6553c572" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Oct 03 13:53:27 crc kubenswrapper[4861]: E1003 13:53:27.298792 4861 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d9a5399a1bb07e35a0f067bbf0f871f8946b80266870335cda8addae6553c572" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Oct 03 13:53:27 crc kubenswrapper[4861]: E1003 13:53:27.301449 4861 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d9a5399a1bb07e35a0f067bbf0f871f8946b80266870335cda8addae6553c572" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Oct 03 13:53:27 crc kubenswrapper[4861]: E1003 13:53:27.301517 4861 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="92e600f4-f5d6-445a-aa5c-493b1eec92c8" containerName="nova-scheduler-scheduler" Oct 03 13:53:27 crc kubenswrapper[4861]: I1003 13:53:27.505901 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-hgvnj" Oct 03 13:53:27 crc kubenswrapper[4861]: I1003 13:53:27.614140 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2s9fd\" (UniqueName: \"kubernetes.io/projected/f57ff497-fc3f-4872-b1e3-3927895d7c6c-kube-api-access-2s9fd\") pod \"f57ff497-fc3f-4872-b1e3-3927895d7c6c\" (UID: \"f57ff497-fc3f-4872-b1e3-3927895d7c6c\") " Oct 03 13:53:27 crc kubenswrapper[4861]: I1003 13:53:27.614309 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f57ff497-fc3f-4872-b1e3-3927895d7c6c-combined-ca-bundle\") pod \"f57ff497-fc3f-4872-b1e3-3927895d7c6c\" (UID: \"f57ff497-fc3f-4872-b1e3-3927895d7c6c\") " Oct 03 13:53:27 crc kubenswrapper[4861]: I1003 13:53:27.614360 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f57ff497-fc3f-4872-b1e3-3927895d7c6c-config-data\") pod \"f57ff497-fc3f-4872-b1e3-3927895d7c6c\" (UID: \"f57ff497-fc3f-4872-b1e3-3927895d7c6c\") " Oct 03 13:53:27 crc kubenswrapper[4861]: I1003 13:53:27.614402 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f57ff497-fc3f-4872-b1e3-3927895d7c6c-scripts\") pod \"f57ff497-fc3f-4872-b1e3-3927895d7c6c\" (UID: \"f57ff497-fc3f-4872-b1e3-3927895d7c6c\") " Oct 03 13:53:27 crc kubenswrapper[4861]: I1003 13:53:27.629550 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f57ff497-fc3f-4872-b1e3-3927895d7c6c-scripts" (OuterVolumeSpecName: "scripts") pod "f57ff497-fc3f-4872-b1e3-3927895d7c6c" (UID: "f57ff497-fc3f-4872-b1e3-3927895d7c6c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:53:27 crc kubenswrapper[4861]: I1003 13:53:27.638478 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f57ff497-fc3f-4872-b1e3-3927895d7c6c-kube-api-access-2s9fd" (OuterVolumeSpecName: "kube-api-access-2s9fd") pod "f57ff497-fc3f-4872-b1e3-3927895d7c6c" (UID: "f57ff497-fc3f-4872-b1e3-3927895d7c6c"). InnerVolumeSpecName "kube-api-access-2s9fd". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:53:27 crc kubenswrapper[4861]: I1003 13:53:27.692384 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f57ff497-fc3f-4872-b1e3-3927895d7c6c-config-data" (OuterVolumeSpecName: "config-data") pod "f57ff497-fc3f-4872-b1e3-3927895d7c6c" (UID: "f57ff497-fc3f-4872-b1e3-3927895d7c6c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:53:27 crc kubenswrapper[4861]: I1003 13:53:27.696559 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f57ff497-fc3f-4872-b1e3-3927895d7c6c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f57ff497-fc3f-4872-b1e3-3927895d7c6c" (UID: "f57ff497-fc3f-4872-b1e3-3927895d7c6c"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:53:27 crc kubenswrapper[4861]: I1003 13:53:27.716885 4861 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f57ff497-fc3f-4872-b1e3-3927895d7c6c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 13:53:27 crc kubenswrapper[4861]: I1003 13:53:27.716917 4861 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f57ff497-fc3f-4872-b1e3-3927895d7c6c-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 13:53:27 crc kubenswrapper[4861]: I1003 13:53:27.716927 4861 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f57ff497-fc3f-4872-b1e3-3927895d7c6c-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 13:53:27 crc kubenswrapper[4861]: I1003 13:53:27.716937 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2s9fd\" (UniqueName: \"kubernetes.io/projected/f57ff497-fc3f-4872-b1e3-3927895d7c6c-kube-api-access-2s9fd\") on node \"crc\" DevicePath \"\"" Oct 03 13:53:27 crc kubenswrapper[4861]: I1003 13:53:27.923530 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 03 13:53:27 crc kubenswrapper[4861]: I1003 13:53:27.978363 4861 generic.go:334] "Generic (PLEG): container finished" podID="92e600f4-f5d6-445a-aa5c-493b1eec92c8" containerID="d9a5399a1bb07e35a0f067bbf0f871f8946b80266870335cda8addae6553c572" exitCode=0 Oct 03 13:53:27 crc kubenswrapper[4861]: I1003 13:53:27.978440 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"92e600f4-f5d6-445a-aa5c-493b1eec92c8","Type":"ContainerDied","Data":"d9a5399a1bb07e35a0f067bbf0f871f8946b80266870335cda8addae6553c572"} Oct 03 13:53:27 crc kubenswrapper[4861]: I1003 13:53:27.991911 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-hgvnj" event={"ID":"f57ff497-fc3f-4872-b1e3-3927895d7c6c","Type":"ContainerDied","Data":"f2e3a35e0f92649dd1e31a09fff676c40df1056591f33f48d56b8ebc64cc3cfb"} Oct 03 13:53:27 crc kubenswrapper[4861]: I1003 13:53:27.991950 4861 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f2e3a35e0f92649dd1e31a09fff676c40df1056591f33f48d56b8ebc64cc3cfb" Oct 03 13:53:27 crc kubenswrapper[4861]: I1003 13:53:27.992007 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-hgvnj" Oct 03 13:53:28 crc kubenswrapper[4861]: I1003 13:53:28.030361 4861 generic.go:334] "Generic (PLEG): container finished" podID="0d14ab87-b6a3-4c72-b82c-6accc45b650f" containerID="776efb223961fd07e7b9a3fdcb3b7cf5d40b2ae38ef8966ee61f89a34dc8659d" exitCode=0 Oct 03 13:53:28 crc kubenswrapper[4861]: I1003 13:53:28.030694 4861 generic.go:334] "Generic (PLEG): container finished" podID="0d14ab87-b6a3-4c72-b82c-6accc45b650f" containerID="8e13995ba47ed22d80c04d85499925db68f1fbec17a50dcf1fe7d313fd19f790" exitCode=143 Oct 03 13:53:28 crc kubenswrapper[4861]: I1003 13:53:28.030748 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"0d14ab87-b6a3-4c72-b82c-6accc45b650f","Type":"ContainerDied","Data":"776efb223961fd07e7b9a3fdcb3b7cf5d40b2ae38ef8966ee61f89a34dc8659d"} Oct 03 13:53:28 crc kubenswrapper[4861]: I1003 13:53:28.030786 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"0d14ab87-b6a3-4c72-b82c-6accc45b650f","Type":"ContainerDied","Data":"8e13995ba47ed22d80c04d85499925db68f1fbec17a50dcf1fe7d313fd19f790"} Oct 03 13:53:28 crc kubenswrapper[4861]: I1003 13:53:28.030798 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"0d14ab87-b6a3-4c72-b82c-6accc45b650f","Type":"ContainerDied","Data":"32ce09aea0226ec3f14042b108465468c8a90c923fc2f6ed1d159fc5070f1a2f"} Oct 03 13:53:28 crc kubenswrapper[4861]: I1003 13:53:28.030817 4861 scope.go:117] "RemoveContainer" containerID="776efb223961fd07e7b9a3fdcb3b7cf5d40b2ae38ef8966ee61f89a34dc8659d" Oct 03 13:53:28 crc kubenswrapper[4861]: I1003 13:53:28.030961 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Oct 03 13:53:28 crc kubenswrapper[4861]: I1003 13:53:28.032057 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/0d14ab87-b6a3-4c72-b82c-6accc45b650f-nova-metadata-tls-certs\") pod \"0d14ab87-b6a3-4c72-b82c-6accc45b650f\" (UID: \"0d14ab87-b6a3-4c72-b82c-6accc45b650f\") " Oct 03 13:53:28 crc kubenswrapper[4861]: I1003 13:53:28.032240 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d14ab87-b6a3-4c72-b82c-6accc45b650f-config-data\") pod \"0d14ab87-b6a3-4c72-b82c-6accc45b650f\" (UID: \"0d14ab87-b6a3-4c72-b82c-6accc45b650f\") " Oct 03 13:53:28 crc kubenswrapper[4861]: I1003 13:53:28.032284 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgsj6\" (UniqueName: \"kubernetes.io/projected/0d14ab87-b6a3-4c72-b82c-6accc45b650f-kube-api-access-zgsj6\") pod \"0d14ab87-b6a3-4c72-b82c-6accc45b650f\" (UID: \"0d14ab87-b6a3-4c72-b82c-6accc45b650f\") " Oct 03 13:53:28 crc kubenswrapper[4861]: I1003 13:53:28.032334 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d14ab87-b6a3-4c72-b82c-6accc45b650f-combined-ca-bundle\") pod \"0d14ab87-b6a3-4c72-b82c-6accc45b650f\" (UID: \"0d14ab87-b6a3-4c72-b82c-6accc45b650f\") " Oct 03 13:53:28 crc kubenswrapper[4861]: I1003 13:53:28.032418 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0d14ab87-b6a3-4c72-b82c-6accc45b650f-logs\") pod \"0d14ab87-b6a3-4c72-b82c-6accc45b650f\" (UID: \"0d14ab87-b6a3-4c72-b82c-6accc45b650f\") " Oct 03 13:53:28 crc kubenswrapper[4861]: I1003 13:53:28.033183 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0d14ab87-b6a3-4c72-b82c-6accc45b650f-logs" (OuterVolumeSpecName: "logs") pod "0d14ab87-b6a3-4c72-b82c-6accc45b650f" (UID: "0d14ab87-b6a3-4c72-b82c-6accc45b650f"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:53:28 crc kubenswrapper[4861]: I1003 13:53:28.050714 4861 generic.go:334] "Generic (PLEG): container finished" podID="47fbe704-46a5-4313-ba09-06f613943f0a" containerID="b29e093bf1dd8a292ea6d261cc2bb25d42e65fe2422cc1c18af9c001e5153670" exitCode=143 Oct 03 13:53:28 crc kubenswrapper[4861]: I1003 13:53:28.050759 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"47fbe704-46a5-4313-ba09-06f613943f0a","Type":"ContainerDied","Data":"b29e093bf1dd8a292ea6d261cc2bb25d42e65fe2422cc1c18af9c001e5153670"} Oct 03 13:53:28 crc kubenswrapper[4861]: I1003 13:53:28.051383 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0d14ab87-b6a3-4c72-b82c-6accc45b650f-kube-api-access-zgsj6" (OuterVolumeSpecName: "kube-api-access-zgsj6") pod "0d14ab87-b6a3-4c72-b82c-6accc45b650f" (UID: "0d14ab87-b6a3-4c72-b82c-6accc45b650f"). InnerVolumeSpecName "kube-api-access-zgsj6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:53:28 crc kubenswrapper[4861]: I1003 13:53:28.083339 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Oct 03 13:53:28 crc kubenswrapper[4861]: E1003 13:53:28.083867 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0acf7c1b-380a-44b8-8542-6610bbc1f700" containerName="nova-manage" Oct 03 13:53:28 crc kubenswrapper[4861]: I1003 13:53:28.083884 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="0acf7c1b-380a-44b8-8542-6610bbc1f700" containerName="nova-manage" Oct 03 13:53:28 crc kubenswrapper[4861]: E1003 13:53:28.083898 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d14ab87-b6a3-4c72-b82c-6accc45b650f" containerName="nova-metadata-log" Oct 03 13:53:28 crc kubenswrapper[4861]: I1003 13:53:28.083904 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d14ab87-b6a3-4c72-b82c-6accc45b650f" containerName="nova-metadata-log" Oct 03 13:53:28 crc kubenswrapper[4861]: E1003 13:53:28.083924 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="709ebd79-e1bf-4e30-ba04-d1dc2c954398" containerName="init" Oct 03 13:53:28 crc kubenswrapper[4861]: I1003 13:53:28.083930 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="709ebd79-e1bf-4e30-ba04-d1dc2c954398" containerName="init" Oct 03 13:53:28 crc kubenswrapper[4861]: E1003 13:53:28.083936 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f57ff497-fc3f-4872-b1e3-3927895d7c6c" containerName="nova-cell1-conductor-db-sync" Oct 03 13:53:28 crc kubenswrapper[4861]: I1003 13:53:28.083943 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="f57ff497-fc3f-4872-b1e3-3927895d7c6c" containerName="nova-cell1-conductor-db-sync" Oct 03 13:53:28 crc kubenswrapper[4861]: E1003 13:53:28.083963 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="709ebd79-e1bf-4e30-ba04-d1dc2c954398" containerName="dnsmasq-dns" Oct 03 13:53:28 crc kubenswrapper[4861]: I1003 13:53:28.083969 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="709ebd79-e1bf-4e30-ba04-d1dc2c954398" containerName="dnsmasq-dns" Oct 03 13:53:28 crc kubenswrapper[4861]: E1003 13:53:28.083992 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d14ab87-b6a3-4c72-b82c-6accc45b650f" containerName="nova-metadata-metadata" Oct 03 13:53:28 crc kubenswrapper[4861]: I1003 13:53:28.084000 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d14ab87-b6a3-4c72-b82c-6accc45b650f" containerName="nova-metadata-metadata" Oct 03 13:53:28 crc kubenswrapper[4861]: I1003 13:53:28.093222 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="709ebd79-e1bf-4e30-ba04-d1dc2c954398" containerName="dnsmasq-dns" Oct 03 13:53:28 crc kubenswrapper[4861]: I1003 13:53:28.093299 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="0d14ab87-b6a3-4c72-b82c-6accc45b650f" containerName="nova-metadata-metadata" Oct 03 13:53:28 crc kubenswrapper[4861]: I1003 13:53:28.093325 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="f57ff497-fc3f-4872-b1e3-3927895d7c6c" containerName="nova-cell1-conductor-db-sync" Oct 03 13:53:28 crc kubenswrapper[4861]: I1003 13:53:28.093350 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="0acf7c1b-380a-44b8-8542-6610bbc1f700" containerName="nova-manage" Oct 03 13:53:28 crc kubenswrapper[4861]: I1003 13:53:28.093363 4861 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="0d14ab87-b6a3-4c72-b82c-6accc45b650f" containerName="nova-metadata-log" Oct 03 13:53:28 crc kubenswrapper[4861]: I1003 13:53:28.095093 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Oct 03 13:53:28 crc kubenswrapper[4861]: I1003 13:53:28.098618 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Oct 03 13:53:28 crc kubenswrapper[4861]: I1003 13:53:28.098923 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0d14ab87-b6a3-4c72-b82c-6accc45b650f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0d14ab87-b6a3-4c72-b82c-6accc45b650f" (UID: "0d14ab87-b6a3-4c72-b82c-6accc45b650f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:53:28 crc kubenswrapper[4861]: I1003 13:53:28.121584 4861 scope.go:117] "RemoveContainer" containerID="8e13995ba47ed22d80c04d85499925db68f1fbec17a50dcf1fe7d313fd19f790" Oct 03 13:53:28 crc kubenswrapper[4861]: I1003 13:53:28.132521 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Oct 03 13:53:28 crc kubenswrapper[4861]: I1003 13:53:28.134012 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/90ae374e-4f8e-4d9c-84e8-00a5c571fd98-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"90ae374e-4f8e-4d9c-84e8-00a5c571fd98\") " pod="openstack/nova-cell1-conductor-0" Oct 03 13:53:28 crc kubenswrapper[4861]: I1003 13:53:28.134141 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/90ae374e-4f8e-4d9c-84e8-00a5c571fd98-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"90ae374e-4f8e-4d9c-84e8-00a5c571fd98\") " pod="openstack/nova-cell1-conductor-0" Oct 03 13:53:28 crc kubenswrapper[4861]: I1003 13:53:28.134180 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zjvgn\" (UniqueName: \"kubernetes.io/projected/90ae374e-4f8e-4d9c-84e8-00a5c571fd98-kube-api-access-zjvgn\") pod \"nova-cell1-conductor-0\" (UID: \"90ae374e-4f8e-4d9c-84e8-00a5c571fd98\") " pod="openstack/nova-cell1-conductor-0" Oct 03 13:53:28 crc kubenswrapper[4861]: I1003 13:53:28.138099 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgsj6\" (UniqueName: \"kubernetes.io/projected/0d14ab87-b6a3-4c72-b82c-6accc45b650f-kube-api-access-zgsj6\") on node \"crc\" DevicePath \"\"" Oct 03 13:53:28 crc kubenswrapper[4861]: I1003 13:53:28.138137 4861 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d14ab87-b6a3-4c72-b82c-6accc45b650f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 13:53:28 crc kubenswrapper[4861]: I1003 13:53:28.138151 4861 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0d14ab87-b6a3-4c72-b82c-6accc45b650f-logs\") on node \"crc\" DevicePath \"\"" Oct 03 13:53:28 crc kubenswrapper[4861]: I1003 13:53:28.156456 4861 scope.go:117] "RemoveContainer" containerID="776efb223961fd07e7b9a3fdcb3b7cf5d40b2ae38ef8966ee61f89a34dc8659d" Oct 03 13:53:28 crc kubenswrapper[4861]: E1003 13:53:28.169147 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = 
NotFound desc = could not find container \"776efb223961fd07e7b9a3fdcb3b7cf5d40b2ae38ef8966ee61f89a34dc8659d\": container with ID starting with 776efb223961fd07e7b9a3fdcb3b7cf5d40b2ae38ef8966ee61f89a34dc8659d not found: ID does not exist" containerID="776efb223961fd07e7b9a3fdcb3b7cf5d40b2ae38ef8966ee61f89a34dc8659d" Oct 03 13:53:28 crc kubenswrapper[4861]: I1003 13:53:28.169347 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"776efb223961fd07e7b9a3fdcb3b7cf5d40b2ae38ef8966ee61f89a34dc8659d"} err="failed to get container status \"776efb223961fd07e7b9a3fdcb3b7cf5d40b2ae38ef8966ee61f89a34dc8659d\": rpc error: code = NotFound desc = could not find container \"776efb223961fd07e7b9a3fdcb3b7cf5d40b2ae38ef8966ee61f89a34dc8659d\": container with ID starting with 776efb223961fd07e7b9a3fdcb3b7cf5d40b2ae38ef8966ee61f89a34dc8659d not found: ID does not exist" Oct 03 13:53:28 crc kubenswrapper[4861]: I1003 13:53:28.169460 4861 scope.go:117] "RemoveContainer" containerID="8e13995ba47ed22d80c04d85499925db68f1fbec17a50dcf1fe7d313fd19f790" Oct 03 13:53:28 crc kubenswrapper[4861]: E1003 13:53:28.170003 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8e13995ba47ed22d80c04d85499925db68f1fbec17a50dcf1fe7d313fd19f790\": container with ID starting with 8e13995ba47ed22d80c04d85499925db68f1fbec17a50dcf1fe7d313fd19f790 not found: ID does not exist" containerID="8e13995ba47ed22d80c04d85499925db68f1fbec17a50dcf1fe7d313fd19f790" Oct 03 13:53:28 crc kubenswrapper[4861]: I1003 13:53:28.170172 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8e13995ba47ed22d80c04d85499925db68f1fbec17a50dcf1fe7d313fd19f790"} err="failed to get container status \"8e13995ba47ed22d80c04d85499925db68f1fbec17a50dcf1fe7d313fd19f790\": rpc error: code = NotFound desc = could not find container \"8e13995ba47ed22d80c04d85499925db68f1fbec17a50dcf1fe7d313fd19f790\": container with ID starting with 8e13995ba47ed22d80c04d85499925db68f1fbec17a50dcf1fe7d313fd19f790 not found: ID does not exist" Oct 03 13:53:28 crc kubenswrapper[4861]: I1003 13:53:28.170352 4861 scope.go:117] "RemoveContainer" containerID="776efb223961fd07e7b9a3fdcb3b7cf5d40b2ae38ef8966ee61f89a34dc8659d" Oct 03 13:53:28 crc kubenswrapper[4861]: I1003 13:53:28.170782 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"776efb223961fd07e7b9a3fdcb3b7cf5d40b2ae38ef8966ee61f89a34dc8659d"} err="failed to get container status \"776efb223961fd07e7b9a3fdcb3b7cf5d40b2ae38ef8966ee61f89a34dc8659d\": rpc error: code = NotFound desc = could not find container \"776efb223961fd07e7b9a3fdcb3b7cf5d40b2ae38ef8966ee61f89a34dc8659d\": container with ID starting with 776efb223961fd07e7b9a3fdcb3b7cf5d40b2ae38ef8966ee61f89a34dc8659d not found: ID does not exist" Oct 03 13:53:28 crc kubenswrapper[4861]: I1003 13:53:28.171435 4861 scope.go:117] "RemoveContainer" containerID="8e13995ba47ed22d80c04d85499925db68f1fbec17a50dcf1fe7d313fd19f790" Oct 03 13:53:28 crc kubenswrapper[4861]: I1003 13:53:28.176499 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8e13995ba47ed22d80c04d85499925db68f1fbec17a50dcf1fe7d313fd19f790"} err="failed to get container status \"8e13995ba47ed22d80c04d85499925db68f1fbec17a50dcf1fe7d313fd19f790\": rpc error: code = NotFound desc = could not find container 
\"8e13995ba47ed22d80c04d85499925db68f1fbec17a50dcf1fe7d313fd19f790\": container with ID starting with 8e13995ba47ed22d80c04d85499925db68f1fbec17a50dcf1fe7d313fd19f790 not found: ID does not exist" Oct 03 13:53:28 crc kubenswrapper[4861]: I1003 13:53:28.182087 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0d14ab87-b6a3-4c72-b82c-6accc45b650f-config-data" (OuterVolumeSpecName: "config-data") pod "0d14ab87-b6a3-4c72-b82c-6accc45b650f" (UID: "0d14ab87-b6a3-4c72-b82c-6accc45b650f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:53:28 crc kubenswrapper[4861]: I1003 13:53:28.216053 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0d14ab87-b6a3-4c72-b82c-6accc45b650f-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "0d14ab87-b6a3-4c72-b82c-6accc45b650f" (UID: "0d14ab87-b6a3-4c72-b82c-6accc45b650f"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:53:28 crc kubenswrapper[4861]: E1003 13:53:28.226399 4861 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf57ff497_fc3f_4872_b1e3_3927895d7c6c.slice/crio-f2e3a35e0f92649dd1e31a09fff676c40df1056591f33f48d56b8ebc64cc3cfb\": RecentStats: unable to find data in memory cache]" Oct 03 13:53:28 crc kubenswrapper[4861]: I1003 13:53:28.239366 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/90ae374e-4f8e-4d9c-84e8-00a5c571fd98-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"90ae374e-4f8e-4d9c-84e8-00a5c571fd98\") " pod="openstack/nova-cell1-conductor-0" Oct 03 13:53:28 crc kubenswrapper[4861]: I1003 13:53:28.239416 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zjvgn\" (UniqueName: \"kubernetes.io/projected/90ae374e-4f8e-4d9c-84e8-00a5c571fd98-kube-api-access-zjvgn\") pod \"nova-cell1-conductor-0\" (UID: \"90ae374e-4f8e-4d9c-84e8-00a5c571fd98\") " pod="openstack/nova-cell1-conductor-0" Oct 03 13:53:28 crc kubenswrapper[4861]: I1003 13:53:28.239525 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/90ae374e-4f8e-4d9c-84e8-00a5c571fd98-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"90ae374e-4f8e-4d9c-84e8-00a5c571fd98\") " pod="openstack/nova-cell1-conductor-0" Oct 03 13:53:28 crc kubenswrapper[4861]: I1003 13:53:28.239613 4861 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/0d14ab87-b6a3-4c72-b82c-6accc45b650f-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 03 13:53:28 crc kubenswrapper[4861]: I1003 13:53:28.239627 4861 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d14ab87-b6a3-4c72-b82c-6accc45b650f-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 13:53:28 crc kubenswrapper[4861]: I1003 13:53:28.245418 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/90ae374e-4f8e-4d9c-84e8-00a5c571fd98-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"90ae374e-4f8e-4d9c-84e8-00a5c571fd98\") " pod="openstack/nova-cell1-conductor-0" Oct 03 13:53:28 
crc kubenswrapper[4861]: I1003 13:53:28.262178 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zjvgn\" (UniqueName: \"kubernetes.io/projected/90ae374e-4f8e-4d9c-84e8-00a5c571fd98-kube-api-access-zjvgn\") pod \"nova-cell1-conductor-0\" (UID: \"90ae374e-4f8e-4d9c-84e8-00a5c571fd98\") " pod="openstack/nova-cell1-conductor-0" Oct 03 13:53:28 crc kubenswrapper[4861]: I1003 13:53:28.280192 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/90ae374e-4f8e-4d9c-84e8-00a5c571fd98-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"90ae374e-4f8e-4d9c-84e8-00a5c571fd98\") " pod="openstack/nova-cell1-conductor-0" Oct 03 13:53:28 crc kubenswrapper[4861]: I1003 13:53:28.416915 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 03 13:53:28 crc kubenswrapper[4861]: I1003 13:53:28.431215 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Oct 03 13:53:28 crc kubenswrapper[4861]: I1003 13:53:28.441397 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Oct 03 13:53:28 crc kubenswrapper[4861]: I1003 13:53:28.443005 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 03 13:53:28 crc kubenswrapper[4861]: I1003 13:53:28.445903 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Oct 03 13:53:28 crc kubenswrapper[4861]: I1003 13:53:28.446704 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Oct 03 13:53:28 crc kubenswrapper[4861]: I1003 13:53:28.447205 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Oct 03 13:53:28 crc kubenswrapper[4861]: I1003 13:53:28.474690 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 03 13:53:28 crc kubenswrapper[4861]: I1003 13:53:28.475503 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Oct 03 13:53:28 crc kubenswrapper[4861]: I1003 13:53:28.543427 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/92e600f4-f5d6-445a-aa5c-493b1eec92c8-combined-ca-bundle\") pod \"92e600f4-f5d6-445a-aa5c-493b1eec92c8\" (UID: \"92e600f4-f5d6-445a-aa5c-493b1eec92c8\") " Oct 03 13:53:28 crc kubenswrapper[4861]: I1003 13:53:28.543674 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/92e600f4-f5d6-445a-aa5c-493b1eec92c8-config-data\") pod \"92e600f4-f5d6-445a-aa5c-493b1eec92c8\" (UID: \"92e600f4-f5d6-445a-aa5c-493b1eec92c8\") " Oct 03 13:53:28 crc kubenswrapper[4861]: I1003 13:53:28.543727 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rtvnn\" (UniqueName: \"kubernetes.io/projected/92e600f4-f5d6-445a-aa5c-493b1eec92c8-kube-api-access-rtvnn\") pod \"92e600f4-f5d6-445a-aa5c-493b1eec92c8\" (UID: \"92e600f4-f5d6-445a-aa5c-493b1eec92c8\") " Oct 03 13:53:28 crc kubenswrapper[4861]: I1003 13:53:28.547612 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/02a413a1-1260-4c95-8513-c3266a085870-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"02a413a1-1260-4c95-8513-c3266a085870\") " pod="openstack/nova-metadata-0" Oct 03 13:53:28 crc kubenswrapper[4861]: I1003 13:53:28.547816 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/02a413a1-1260-4c95-8513-c3266a085870-config-data\") pod \"nova-metadata-0\" (UID: \"02a413a1-1260-4c95-8513-c3266a085870\") " pod="openstack/nova-metadata-0" Oct 03 13:53:28 crc kubenswrapper[4861]: I1003 13:53:28.547846 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02a413a1-1260-4c95-8513-c3266a085870-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"02a413a1-1260-4c95-8513-c3266a085870\") " pod="openstack/nova-metadata-0" Oct 03 13:53:28 crc kubenswrapper[4861]: I1003 13:53:28.547896 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/02a413a1-1260-4c95-8513-c3266a085870-logs\") pod \"nova-metadata-0\" (UID: \"02a413a1-1260-4c95-8513-c3266a085870\") " pod="openstack/nova-metadata-0" Oct 03 13:53:28 crc kubenswrapper[4861]: I1003 13:53:28.547998 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wzdhl\" (UniqueName: \"kubernetes.io/projected/02a413a1-1260-4c95-8513-c3266a085870-kube-api-access-wzdhl\") pod \"nova-metadata-0\" (UID: \"02a413a1-1260-4c95-8513-c3266a085870\") " pod="openstack/nova-metadata-0" Oct 03 13:53:28 crc kubenswrapper[4861]: I1003 13:53:28.554370 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/92e600f4-f5d6-445a-aa5c-493b1eec92c8-kube-api-access-rtvnn" (OuterVolumeSpecName: "kube-api-access-rtvnn") pod "92e600f4-f5d6-445a-aa5c-493b1eec92c8" (UID: "92e600f4-f5d6-445a-aa5c-493b1eec92c8"). InnerVolumeSpecName "kube-api-access-rtvnn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:53:28 crc kubenswrapper[4861]: I1003 13:53:28.602929 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/92e600f4-f5d6-445a-aa5c-493b1eec92c8-config-data" (OuterVolumeSpecName: "config-data") pod "92e600f4-f5d6-445a-aa5c-493b1eec92c8" (UID: "92e600f4-f5d6-445a-aa5c-493b1eec92c8"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:53:28 crc kubenswrapper[4861]: I1003 13:53:28.629427 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/92e600f4-f5d6-445a-aa5c-493b1eec92c8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "92e600f4-f5d6-445a-aa5c-493b1eec92c8" (UID: "92e600f4-f5d6-445a-aa5c-493b1eec92c8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:53:28 crc kubenswrapper[4861]: I1003 13:53:28.651976 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/02a413a1-1260-4c95-8513-c3266a085870-config-data\") pod \"nova-metadata-0\" (UID: \"02a413a1-1260-4c95-8513-c3266a085870\") " pod="openstack/nova-metadata-0" Oct 03 13:53:28 crc kubenswrapper[4861]: I1003 13:53:28.652326 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02a413a1-1260-4c95-8513-c3266a085870-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"02a413a1-1260-4c95-8513-c3266a085870\") " pod="openstack/nova-metadata-0" Oct 03 13:53:28 crc kubenswrapper[4861]: I1003 13:53:28.652376 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/02a413a1-1260-4c95-8513-c3266a085870-logs\") pod \"nova-metadata-0\" (UID: \"02a413a1-1260-4c95-8513-c3266a085870\") " pod="openstack/nova-metadata-0" Oct 03 13:53:28 crc kubenswrapper[4861]: I1003 13:53:28.652441 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wzdhl\" (UniqueName: \"kubernetes.io/projected/02a413a1-1260-4c95-8513-c3266a085870-kube-api-access-wzdhl\") pod \"nova-metadata-0\" (UID: \"02a413a1-1260-4c95-8513-c3266a085870\") " pod="openstack/nova-metadata-0" Oct 03 13:53:28 crc kubenswrapper[4861]: I1003 13:53:28.652539 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/02a413a1-1260-4c95-8513-c3266a085870-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"02a413a1-1260-4c95-8513-c3266a085870\") " pod="openstack/nova-metadata-0" Oct 03 13:53:28 crc kubenswrapper[4861]: I1003 13:53:28.652672 4861 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/92e600f4-f5d6-445a-aa5c-493b1eec92c8-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 13:53:28 crc kubenswrapper[4861]: I1003 13:53:28.652688 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rtvnn\" (UniqueName: \"kubernetes.io/projected/92e600f4-f5d6-445a-aa5c-493b1eec92c8-kube-api-access-rtvnn\") on node \"crc\" DevicePath \"\"" Oct 03 13:53:28 crc kubenswrapper[4861]: I1003 13:53:28.652699 4861 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/92e600f4-f5d6-445a-aa5c-493b1eec92c8-combined-ca-bundle\") on node \"crc\" 
DevicePath \"\"" Oct 03 13:53:28 crc kubenswrapper[4861]: I1003 13:53:28.653096 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/02a413a1-1260-4c95-8513-c3266a085870-logs\") pod \"nova-metadata-0\" (UID: \"02a413a1-1260-4c95-8513-c3266a085870\") " pod="openstack/nova-metadata-0" Oct 03 13:53:28 crc kubenswrapper[4861]: I1003 13:53:28.660280 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/02a413a1-1260-4c95-8513-c3266a085870-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"02a413a1-1260-4c95-8513-c3266a085870\") " pod="openstack/nova-metadata-0" Oct 03 13:53:28 crc kubenswrapper[4861]: I1003 13:53:28.664620 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/02a413a1-1260-4c95-8513-c3266a085870-config-data\") pod \"nova-metadata-0\" (UID: \"02a413a1-1260-4c95-8513-c3266a085870\") " pod="openstack/nova-metadata-0" Oct 03 13:53:28 crc kubenswrapper[4861]: I1003 13:53:28.668559 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02a413a1-1260-4c95-8513-c3266a085870-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"02a413a1-1260-4c95-8513-c3266a085870\") " pod="openstack/nova-metadata-0" Oct 03 13:53:28 crc kubenswrapper[4861]: I1003 13:53:28.678610 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wzdhl\" (UniqueName: \"kubernetes.io/projected/02a413a1-1260-4c95-8513-c3266a085870-kube-api-access-wzdhl\") pod \"nova-metadata-0\" (UID: \"02a413a1-1260-4c95-8513-c3266a085870\") " pod="openstack/nova-metadata-0" Oct 03 13:53:28 crc kubenswrapper[4861]: I1003 13:53:28.707498 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0d14ab87-b6a3-4c72-b82c-6accc45b650f" path="/var/lib/kubelet/pods/0d14ab87-b6a3-4c72-b82c-6accc45b650f/volumes" Oct 03 13:53:28 crc kubenswrapper[4861]: I1003 13:53:28.876519 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Oct 03 13:53:28 crc kubenswrapper[4861]: I1003 13:53:28.894621 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 03 13:53:28 crc kubenswrapper[4861]: I1003 13:53:28.894950 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="f6bf9658-85f4-4e0b-9aa6-cf672e04a858" containerName="kube-state-metrics" containerID="cri-o://0a82d11036b5d4634c16c5cf9c05f099eb6047fbc529d01b940c17b558ee1783" gracePeriod=30 Oct 03 13:53:29 crc kubenswrapper[4861]: I1003 13:53:29.006275 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Oct 03 13:53:29 crc kubenswrapper[4861]: W1003 13:53:29.040721 4861 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod90ae374e_4f8e_4d9c_84e8_00a5c571fd98.slice/crio-6d2b9500072c4a0e5523b95c94c0ed5a141bd82fd58340f0f0c0337631c85c41 WatchSource:0}: Error finding container 6d2b9500072c4a0e5523b95c94c0ed5a141bd82fd58340f0f0c0337631c85c41: Status 404 returned error can't find the container with id 6d2b9500072c4a0e5523b95c94c0ed5a141bd82fd58340f0f0c0337631c85c41 Oct 03 13:53:29 crc kubenswrapper[4861]: I1003 13:53:29.085435 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"90ae374e-4f8e-4d9c-84e8-00a5c571fd98","Type":"ContainerStarted","Data":"6d2b9500072c4a0e5523b95c94c0ed5a141bd82fd58340f0f0c0337631c85c41"} Oct 03 13:53:29 crc kubenswrapper[4861]: I1003 13:53:29.119631 4861 generic.go:334] "Generic (PLEG): container finished" podID="f6bf9658-85f4-4e0b-9aa6-cf672e04a858" containerID="0a82d11036b5d4634c16c5cf9c05f099eb6047fbc529d01b940c17b558ee1783" exitCode=2 Oct 03 13:53:29 crc kubenswrapper[4861]: I1003 13:53:29.119719 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"f6bf9658-85f4-4e0b-9aa6-cf672e04a858","Type":"ContainerDied","Data":"0a82d11036b5d4634c16c5cf9c05f099eb6047fbc529d01b940c17b558ee1783"} Oct 03 13:53:29 crc kubenswrapper[4861]: I1003 13:53:29.122032 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"92e600f4-f5d6-445a-aa5c-493b1eec92c8","Type":"ContainerDied","Data":"5e836593ae0dc3e0ff94e1b96820606e3a700c14726d3f3315fad29fc5312f2b"} Oct 03 13:53:29 crc kubenswrapper[4861]: I1003 13:53:29.122074 4861 scope.go:117] "RemoveContainer" containerID="d9a5399a1bb07e35a0f067bbf0f871f8946b80266870335cda8addae6553c572" Oct 03 13:53:29 crc kubenswrapper[4861]: I1003 13:53:29.122171 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Oct 03 13:53:29 crc kubenswrapper[4861]: I1003 13:53:29.173355 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Oct 03 13:53:29 crc kubenswrapper[4861]: I1003 13:53:29.291706 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Oct 03 13:53:29 crc kubenswrapper[4861]: I1003 13:53:29.321198 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Oct 03 13:53:29 crc kubenswrapper[4861]: E1003 13:53:29.321684 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="92e600f4-f5d6-445a-aa5c-493b1eec92c8" containerName="nova-scheduler-scheduler" Oct 03 13:53:29 crc kubenswrapper[4861]: I1003 13:53:29.321705 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="92e600f4-f5d6-445a-aa5c-493b1eec92c8" containerName="nova-scheduler-scheduler" Oct 03 13:53:29 crc kubenswrapper[4861]: I1003 13:53:29.321922 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="92e600f4-f5d6-445a-aa5c-493b1eec92c8" containerName="nova-scheduler-scheduler" Oct 03 13:53:29 crc kubenswrapper[4861]: I1003 13:53:29.322688 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 03 13:53:29 crc kubenswrapper[4861]: I1003 13:53:29.327845 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Oct 03 13:53:29 crc kubenswrapper[4861]: I1003 13:53:29.358633 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 03 13:53:29 crc kubenswrapper[4861]: I1003 13:53:29.369620 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fabf66cd-aed6-4943-8ee6-6546c572cb91-config-data\") pod \"nova-scheduler-0\" (UID: \"fabf66cd-aed6-4943-8ee6-6546c572cb91\") " pod="openstack/nova-scheduler-0" Oct 03 13:53:29 crc kubenswrapper[4861]: I1003 13:53:29.369753 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fabf66cd-aed6-4943-8ee6-6546c572cb91-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"fabf66cd-aed6-4943-8ee6-6546c572cb91\") " pod="openstack/nova-scheduler-0" Oct 03 13:53:29 crc kubenswrapper[4861]: I1003 13:53:29.369787 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lhfdc\" (UniqueName: \"kubernetes.io/projected/fabf66cd-aed6-4943-8ee6-6546c572cb91-kube-api-access-lhfdc\") pod \"nova-scheduler-0\" (UID: \"fabf66cd-aed6-4943-8ee6-6546c572cb91\") " pod="openstack/nova-scheduler-0" Oct 03 13:53:29 crc kubenswrapper[4861]: I1003 13:53:29.470886 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fabf66cd-aed6-4943-8ee6-6546c572cb91-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"fabf66cd-aed6-4943-8ee6-6546c572cb91\") " pod="openstack/nova-scheduler-0" Oct 03 13:53:29 crc kubenswrapper[4861]: I1003 13:53:29.471174 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lhfdc\" (UniqueName: \"kubernetes.io/projected/fabf66cd-aed6-4943-8ee6-6546c572cb91-kube-api-access-lhfdc\") pod \"nova-scheduler-0\" (UID: \"fabf66cd-aed6-4943-8ee6-6546c572cb91\") " pod="openstack/nova-scheduler-0" Oct 03 
13:53:29 crc kubenswrapper[4861]: I1003 13:53:29.471453 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fabf66cd-aed6-4943-8ee6-6546c572cb91-config-data\") pod \"nova-scheduler-0\" (UID: \"fabf66cd-aed6-4943-8ee6-6546c572cb91\") " pod="openstack/nova-scheduler-0" Oct 03 13:53:29 crc kubenswrapper[4861]: I1003 13:53:29.476050 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fabf66cd-aed6-4943-8ee6-6546c572cb91-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"fabf66cd-aed6-4943-8ee6-6546c572cb91\") " pod="openstack/nova-scheduler-0" Oct 03 13:53:29 crc kubenswrapper[4861]: I1003 13:53:29.476446 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fabf66cd-aed6-4943-8ee6-6546c572cb91-config-data\") pod \"nova-scheduler-0\" (UID: \"fabf66cd-aed6-4943-8ee6-6546c572cb91\") " pod="openstack/nova-scheduler-0" Oct 03 13:53:29 crc kubenswrapper[4861]: I1003 13:53:29.488993 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lhfdc\" (UniqueName: \"kubernetes.io/projected/fabf66cd-aed6-4943-8ee6-6546c572cb91-kube-api-access-lhfdc\") pod \"nova-scheduler-0\" (UID: \"fabf66cd-aed6-4943-8ee6-6546c572cb91\") " pod="openstack/nova-scheduler-0" Oct 03 13:53:29 crc kubenswrapper[4861]: I1003 13:53:29.584917 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 03 13:53:29 crc kubenswrapper[4861]: I1003 13:53:29.665203 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 03 13:53:29 crc kubenswrapper[4861]: I1003 13:53:29.696344 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 03 13:53:29 crc kubenswrapper[4861]: I1003 13:53:29.778529 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2twt6\" (UniqueName: \"kubernetes.io/projected/f6bf9658-85f4-4e0b-9aa6-cf672e04a858-kube-api-access-2twt6\") pod \"f6bf9658-85f4-4e0b-9aa6-cf672e04a858\" (UID: \"f6bf9658-85f4-4e0b-9aa6-cf672e04a858\") " Oct 03 13:53:29 crc kubenswrapper[4861]: I1003 13:53:29.787866 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f6bf9658-85f4-4e0b-9aa6-cf672e04a858-kube-api-access-2twt6" (OuterVolumeSpecName: "kube-api-access-2twt6") pod "f6bf9658-85f4-4e0b-9aa6-cf672e04a858" (UID: "f6bf9658-85f4-4e0b-9aa6-cf672e04a858"). InnerVolumeSpecName "kube-api-access-2twt6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:53:29 crc kubenswrapper[4861]: I1003 13:53:29.880736 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2twt6\" (UniqueName: \"kubernetes.io/projected/f6bf9658-85f4-4e0b-9aa6-cf672e04a858-kube-api-access-2twt6\") on node \"crc\" DevicePath \"\"" Oct 03 13:53:30 crc kubenswrapper[4861]: I1003 13:53:30.131449 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"02a413a1-1260-4c95-8513-c3266a085870","Type":"ContainerStarted","Data":"cc1767d9373e1d3602b0168c08d6e38c68f068e1b18ef113855bd8a99e47cfc8"} Oct 03 13:53:30 crc kubenswrapper[4861]: I1003 13:53:30.131502 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"02a413a1-1260-4c95-8513-c3266a085870","Type":"ContainerStarted","Data":"1afaf57b0d18a18001b189539a8b0bc580d485fcc64ab94c9106b6b3a4b1ea3a"} Oct 03 13:53:30 crc kubenswrapper[4861]: I1003 13:53:30.131517 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"02a413a1-1260-4c95-8513-c3266a085870","Type":"ContainerStarted","Data":"b12e040fa4850cc9492ec36cda3ad8a4ddf1752b8b941eaa74cb8662b866944d"} Oct 03 13:53:30 crc kubenswrapper[4861]: I1003 13:53:30.134250 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"f6bf9658-85f4-4e0b-9aa6-cf672e04a858","Type":"ContainerDied","Data":"371b5cb918786219d9674932e023378bc86ce57020828b9077c3c8b00282851a"} Oct 03 13:53:30 crc kubenswrapper[4861]: I1003 13:53:30.134299 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 03 13:53:30 crc kubenswrapper[4861]: I1003 13:53:30.134601 4861 scope.go:117] "RemoveContainer" containerID="0a82d11036b5d4634c16c5cf9c05f099eb6047fbc529d01b940c17b558ee1783" Oct 03 13:53:30 crc kubenswrapper[4861]: I1003 13:53:30.137391 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"90ae374e-4f8e-4d9c-84e8-00a5c571fd98","Type":"ContainerStarted","Data":"da7ef2deb5c2b19018296e2dc84fea11c0591f247bace9547b078ad4584fda9e"} Oct 03 13:53:30 crc kubenswrapper[4861]: I1003 13:53:30.138112 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Oct 03 13:53:30 crc kubenswrapper[4861]: I1003 13:53:30.145539 4861 patch_prober.go:28] interesting pod/machine-config-daemon-t9slw container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 13:53:30 crc kubenswrapper[4861]: I1003 13:53:30.145591 4861 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 13:53:30 crc kubenswrapper[4861]: I1003 13:53:30.153602 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.153583322 podStartE2EDuration="2.153583322s" podCreationTimestamp="2025-10-03 13:53:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" 
observedRunningTime="2025-10-03 13:53:30.151600559 +0000 UTC m=+1324.149585606" watchObservedRunningTime="2025-10-03 13:53:30.153583322 +0000 UTC m=+1324.151568369" Oct 03 13:53:30 crc kubenswrapper[4861]: I1003 13:53:30.193153 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.193138772 podStartE2EDuration="2.193138772s" podCreationTimestamp="2025-10-03 13:53:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:53:30.176937341 +0000 UTC m=+1324.174922408" watchObservedRunningTime="2025-10-03 13:53:30.193138772 +0000 UTC m=+1324.191123819" Oct 03 13:53:30 crc kubenswrapper[4861]: I1003 13:53:30.208006 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 03 13:53:30 crc kubenswrapper[4861]: I1003 13:53:30.219309 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 03 13:53:30 crc kubenswrapper[4861]: I1003 13:53:30.247290 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Oct 03 13:53:30 crc kubenswrapper[4861]: E1003 13:53:30.247666 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f6bf9658-85f4-4e0b-9aa6-cf672e04a858" containerName="kube-state-metrics" Oct 03 13:53:30 crc kubenswrapper[4861]: I1003 13:53:30.247682 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="f6bf9658-85f4-4e0b-9aa6-cf672e04a858" containerName="kube-state-metrics" Oct 03 13:53:30 crc kubenswrapper[4861]: I1003 13:53:30.247864 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="f6bf9658-85f4-4e0b-9aa6-cf672e04a858" containerName="kube-state-metrics" Oct 03 13:53:30 crc kubenswrapper[4861]: I1003 13:53:30.248505 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 03 13:53:30 crc kubenswrapper[4861]: I1003 13:53:30.251746 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-kube-state-metrics-svc" Oct 03 13:53:30 crc kubenswrapper[4861]: I1003 13:53:30.262310 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"kube-state-metrics-tls-config" Oct 03 13:53:30 crc kubenswrapper[4861]: I1003 13:53:30.270057 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 03 13:53:30 crc kubenswrapper[4861]: I1003 13:53:30.278485 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 03 13:53:30 crc kubenswrapper[4861]: I1003 13:53:30.292056 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vtj7d\" (UniqueName: \"kubernetes.io/projected/ccb19566-aa49-4551-9fbf-a05affdd60e2-kube-api-access-vtj7d\") pod \"kube-state-metrics-0\" (UID: \"ccb19566-aa49-4551-9fbf-a05affdd60e2\") " pod="openstack/kube-state-metrics-0" Oct 03 13:53:30 crc kubenswrapper[4861]: I1003 13:53:30.292126 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/ccb19566-aa49-4551-9fbf-a05affdd60e2-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"ccb19566-aa49-4551-9fbf-a05affdd60e2\") " pod="openstack/kube-state-metrics-0" Oct 03 13:53:30 crc kubenswrapper[4861]: I1003 13:53:30.292165 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ccb19566-aa49-4551-9fbf-a05affdd60e2-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"ccb19566-aa49-4551-9fbf-a05affdd60e2\") " pod="openstack/kube-state-metrics-0" Oct 03 13:53:30 crc kubenswrapper[4861]: I1003 13:53:30.292213 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/ccb19566-aa49-4551-9fbf-a05affdd60e2-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"ccb19566-aa49-4551-9fbf-a05affdd60e2\") " pod="openstack/kube-state-metrics-0" Oct 03 13:53:30 crc kubenswrapper[4861]: I1003 13:53:30.393888 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vtj7d\" (UniqueName: \"kubernetes.io/projected/ccb19566-aa49-4551-9fbf-a05affdd60e2-kube-api-access-vtj7d\") pod \"kube-state-metrics-0\" (UID: \"ccb19566-aa49-4551-9fbf-a05affdd60e2\") " pod="openstack/kube-state-metrics-0" Oct 03 13:53:30 crc kubenswrapper[4861]: I1003 13:53:30.393949 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/ccb19566-aa49-4551-9fbf-a05affdd60e2-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"ccb19566-aa49-4551-9fbf-a05affdd60e2\") " pod="openstack/kube-state-metrics-0" Oct 03 13:53:30 crc kubenswrapper[4861]: I1003 13:53:30.394012 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ccb19566-aa49-4551-9fbf-a05affdd60e2-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"ccb19566-aa49-4551-9fbf-a05affdd60e2\") " pod="openstack/kube-state-metrics-0" Oct 03 13:53:30 crc 
kubenswrapper[4861]: I1003 13:53:30.394097 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/ccb19566-aa49-4551-9fbf-a05affdd60e2-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"ccb19566-aa49-4551-9fbf-a05affdd60e2\") " pod="openstack/kube-state-metrics-0" Oct 03 13:53:30 crc kubenswrapper[4861]: I1003 13:53:30.401887 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/ccb19566-aa49-4551-9fbf-a05affdd60e2-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"ccb19566-aa49-4551-9fbf-a05affdd60e2\") " pod="openstack/kube-state-metrics-0" Oct 03 13:53:30 crc kubenswrapper[4861]: I1003 13:53:30.405071 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/ccb19566-aa49-4551-9fbf-a05affdd60e2-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"ccb19566-aa49-4551-9fbf-a05affdd60e2\") " pod="openstack/kube-state-metrics-0" Oct 03 13:53:30 crc kubenswrapper[4861]: I1003 13:53:30.415115 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vtj7d\" (UniqueName: \"kubernetes.io/projected/ccb19566-aa49-4551-9fbf-a05affdd60e2-kube-api-access-vtj7d\") pod \"kube-state-metrics-0\" (UID: \"ccb19566-aa49-4551-9fbf-a05affdd60e2\") " pod="openstack/kube-state-metrics-0" Oct 03 13:53:30 crc kubenswrapper[4861]: I1003 13:53:30.419529 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ccb19566-aa49-4551-9fbf-a05affdd60e2-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"ccb19566-aa49-4551-9fbf-a05affdd60e2\") " pod="openstack/kube-state-metrics-0" Oct 03 13:53:30 crc kubenswrapper[4861]: I1003 13:53:30.596112 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 03 13:53:30 crc kubenswrapper[4861]: I1003 13:53:30.693206 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="92e600f4-f5d6-445a-aa5c-493b1eec92c8" path="/var/lib/kubelet/pods/92e600f4-f5d6-445a-aa5c-493b1eec92c8/volumes" Oct 03 13:53:30 crc kubenswrapper[4861]: I1003 13:53:30.693910 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f6bf9658-85f4-4e0b-9aa6-cf672e04a858" path="/var/lib/kubelet/pods/f6bf9658-85f4-4e0b-9aa6-cf672e04a858/volumes" Oct 03 13:53:30 crc kubenswrapper[4861]: I1003 13:53:30.831691 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 03 13:53:30 crc kubenswrapper[4861]: I1003 13:53:30.905414 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/47fbe704-46a5-4313-ba09-06f613943f0a-logs\") pod \"47fbe704-46a5-4313-ba09-06f613943f0a\" (UID: \"47fbe704-46a5-4313-ba09-06f613943f0a\") " Oct 03 13:53:30 crc kubenswrapper[4861]: I1003 13:53:30.905509 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/47fbe704-46a5-4313-ba09-06f613943f0a-combined-ca-bundle\") pod \"47fbe704-46a5-4313-ba09-06f613943f0a\" (UID: \"47fbe704-46a5-4313-ba09-06f613943f0a\") " Oct 03 13:53:30 crc kubenswrapper[4861]: I1003 13:53:30.905633 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qb6zh\" (UniqueName: \"kubernetes.io/projected/47fbe704-46a5-4313-ba09-06f613943f0a-kube-api-access-qb6zh\") pod \"47fbe704-46a5-4313-ba09-06f613943f0a\" (UID: \"47fbe704-46a5-4313-ba09-06f613943f0a\") " Oct 03 13:53:30 crc kubenswrapper[4861]: I1003 13:53:30.905710 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/47fbe704-46a5-4313-ba09-06f613943f0a-config-data\") pod \"47fbe704-46a5-4313-ba09-06f613943f0a\" (UID: \"47fbe704-46a5-4313-ba09-06f613943f0a\") " Oct 03 13:53:30 crc kubenswrapper[4861]: I1003 13:53:30.909450 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/47fbe704-46a5-4313-ba09-06f613943f0a-logs" (OuterVolumeSpecName: "logs") pod "47fbe704-46a5-4313-ba09-06f613943f0a" (UID: "47fbe704-46a5-4313-ba09-06f613943f0a"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:53:30 crc kubenswrapper[4861]: I1003 13:53:30.928618 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/47fbe704-46a5-4313-ba09-06f613943f0a-kube-api-access-qb6zh" (OuterVolumeSpecName: "kube-api-access-qb6zh") pod "47fbe704-46a5-4313-ba09-06f613943f0a" (UID: "47fbe704-46a5-4313-ba09-06f613943f0a"). InnerVolumeSpecName "kube-api-access-qb6zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:53:30 crc kubenswrapper[4861]: I1003 13:53:30.961139 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/47fbe704-46a5-4313-ba09-06f613943f0a-config-data" (OuterVolumeSpecName: "config-data") pod "47fbe704-46a5-4313-ba09-06f613943f0a" (UID: "47fbe704-46a5-4313-ba09-06f613943f0a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:53:30 crc kubenswrapper[4861]: I1003 13:53:30.970491 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/47fbe704-46a5-4313-ba09-06f613943f0a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "47fbe704-46a5-4313-ba09-06f613943f0a" (UID: "47fbe704-46a5-4313-ba09-06f613943f0a"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:53:31 crc kubenswrapper[4861]: I1003 13:53:31.007213 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qb6zh\" (UniqueName: \"kubernetes.io/projected/47fbe704-46a5-4313-ba09-06f613943f0a-kube-api-access-qb6zh\") on node \"crc\" DevicePath \"\"" Oct 03 13:53:31 crc kubenswrapper[4861]: I1003 13:53:31.007282 4861 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/47fbe704-46a5-4313-ba09-06f613943f0a-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 13:53:31 crc kubenswrapper[4861]: I1003 13:53:31.007292 4861 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/47fbe704-46a5-4313-ba09-06f613943f0a-logs\") on node \"crc\" DevicePath \"\"" Oct 03 13:53:31 crc kubenswrapper[4861]: I1003 13:53:31.007306 4861 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/47fbe704-46a5-4313-ba09-06f613943f0a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 13:53:31 crc kubenswrapper[4861]: I1003 13:53:31.149652 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"fabf66cd-aed6-4943-8ee6-6546c572cb91","Type":"ContainerStarted","Data":"4c6c6994c999addc3fc56bea7791eb4c1521e12cc2024fc440a3fd1ea808a37a"} Oct 03 13:53:31 crc kubenswrapper[4861]: I1003 13:53:31.149707 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"fabf66cd-aed6-4943-8ee6-6546c572cb91","Type":"ContainerStarted","Data":"038a86d8d1582d33f526ff90330ce2a172705538bb354dc1281bd1a622cfd5b3"} Oct 03 13:53:31 crc kubenswrapper[4861]: I1003 13:53:31.158957 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 03 13:53:31 crc kubenswrapper[4861]: I1003 13:53:31.159012 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"47fbe704-46a5-4313-ba09-06f613943f0a","Type":"ContainerDied","Data":"15fbdfb8cd8c486df8149ea871d1f6788a26becfff3b98c8f1b9c9417ce30782"} Oct 03 13:53:31 crc kubenswrapper[4861]: I1003 13:53:31.159059 4861 scope.go:117] "RemoveContainer" containerID="15fbdfb8cd8c486df8149ea871d1f6788a26becfff3b98c8f1b9c9417ce30782" Oct 03 13:53:31 crc kubenswrapper[4861]: I1003 13:53:31.158843 4861 generic.go:334] "Generic (PLEG): container finished" podID="47fbe704-46a5-4313-ba09-06f613943f0a" containerID="15fbdfb8cd8c486df8149ea871d1f6788a26becfff3b98c8f1b9c9417ce30782" exitCode=0 Oct 03 13:53:31 crc kubenswrapper[4861]: I1003 13:53:31.160706 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"47fbe704-46a5-4313-ba09-06f613943f0a","Type":"ContainerDied","Data":"7f8d2d68028438f371854b4a32f146ae7240f0ad27695e5b0d7ae6fbb44728e8"} Oct 03 13:53:31 crc kubenswrapper[4861]: I1003 13:53:31.197526 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 03 13:53:31 crc kubenswrapper[4861]: I1003 13:53:31.204554 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.204534071 podStartE2EDuration="2.204534071s" podCreationTimestamp="2025-10-03 13:53:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:53:31.183951935 +0000 UTC m=+1325.181937002" watchObservedRunningTime="2025-10-03 13:53:31.204534071 +0000 UTC m=+1325.202519118" Oct 03 13:53:31 crc kubenswrapper[4861]: I1003 13:53:31.215487 4861 scope.go:117] "RemoveContainer" containerID="b29e093bf1dd8a292ea6d261cc2bb25d42e65fe2422cc1c18af9c001e5153670" Oct 03 13:53:31 crc kubenswrapper[4861]: I1003 13:53:31.220280 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 03 13:53:31 crc kubenswrapper[4861]: I1003 13:53:31.232947 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Oct 03 13:53:31 crc kubenswrapper[4861]: I1003 13:53:31.246260 4861 scope.go:117] "RemoveContainer" containerID="15fbdfb8cd8c486df8149ea871d1f6788a26becfff3b98c8f1b9c9417ce30782" Oct 03 13:53:31 crc kubenswrapper[4861]: E1003 13:53:31.246651 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"15fbdfb8cd8c486df8149ea871d1f6788a26becfff3b98c8f1b9c9417ce30782\": container with ID starting with 15fbdfb8cd8c486df8149ea871d1f6788a26becfff3b98c8f1b9c9417ce30782 not found: ID does not exist" containerID="15fbdfb8cd8c486df8149ea871d1f6788a26becfff3b98c8f1b9c9417ce30782" Oct 03 13:53:31 crc kubenswrapper[4861]: I1003 13:53:31.246681 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"15fbdfb8cd8c486df8149ea871d1f6788a26becfff3b98c8f1b9c9417ce30782"} err="failed to get container status \"15fbdfb8cd8c486df8149ea871d1f6788a26becfff3b98c8f1b9c9417ce30782\": rpc error: code = NotFound desc = could not find container \"15fbdfb8cd8c486df8149ea871d1f6788a26becfff3b98c8f1b9c9417ce30782\": container with ID starting with 15fbdfb8cd8c486df8149ea871d1f6788a26becfff3b98c8f1b9c9417ce30782 not found: ID does not exist" Oct 03 13:53:31 crc kubenswrapper[4861]: I1003 
13:53:31.246701 4861 scope.go:117] "RemoveContainer" containerID="b29e093bf1dd8a292ea6d261cc2bb25d42e65fe2422cc1c18af9c001e5153670" Oct 03 13:53:31 crc kubenswrapper[4861]: E1003 13:53:31.246935 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b29e093bf1dd8a292ea6d261cc2bb25d42e65fe2422cc1c18af9c001e5153670\": container with ID starting with b29e093bf1dd8a292ea6d261cc2bb25d42e65fe2422cc1c18af9c001e5153670 not found: ID does not exist" containerID="b29e093bf1dd8a292ea6d261cc2bb25d42e65fe2422cc1c18af9c001e5153670" Oct 03 13:53:31 crc kubenswrapper[4861]: I1003 13:53:31.246972 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b29e093bf1dd8a292ea6d261cc2bb25d42e65fe2422cc1c18af9c001e5153670"} err="failed to get container status \"b29e093bf1dd8a292ea6d261cc2bb25d42e65fe2422cc1c18af9c001e5153670\": rpc error: code = NotFound desc = could not find container \"b29e093bf1dd8a292ea6d261cc2bb25d42e65fe2422cc1c18af9c001e5153670\": container with ID starting with b29e093bf1dd8a292ea6d261cc2bb25d42e65fe2422cc1c18af9c001e5153670 not found: ID does not exist" Oct 03 13:53:31 crc kubenswrapper[4861]: I1003 13:53:31.248162 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Oct 03 13:53:31 crc kubenswrapper[4861]: E1003 13:53:31.248532 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47fbe704-46a5-4313-ba09-06f613943f0a" containerName="nova-api-api" Oct 03 13:53:31 crc kubenswrapper[4861]: I1003 13:53:31.248552 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="47fbe704-46a5-4313-ba09-06f613943f0a" containerName="nova-api-api" Oct 03 13:53:31 crc kubenswrapper[4861]: E1003 13:53:31.248567 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47fbe704-46a5-4313-ba09-06f613943f0a" containerName="nova-api-log" Oct 03 13:53:31 crc kubenswrapper[4861]: I1003 13:53:31.248575 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="47fbe704-46a5-4313-ba09-06f613943f0a" containerName="nova-api-log" Oct 03 13:53:31 crc kubenswrapper[4861]: I1003 13:53:31.248777 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="47fbe704-46a5-4313-ba09-06f613943f0a" containerName="nova-api-log" Oct 03 13:53:31 crc kubenswrapper[4861]: I1003 13:53:31.248808 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="47fbe704-46a5-4313-ba09-06f613943f0a" containerName="nova-api-api" Oct 03 13:53:31 crc kubenswrapper[4861]: I1003 13:53:31.252352 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 03 13:53:31 crc kubenswrapper[4861]: I1003 13:53:31.257299 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Oct 03 13:53:31 crc kubenswrapper[4861]: I1003 13:53:31.266189 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 03 13:53:31 crc kubenswrapper[4861]: I1003 13:53:31.312146 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4lsgp\" (UniqueName: \"kubernetes.io/projected/fd1efd4a-2b99-445d-b2af-4f129db94e2c-kube-api-access-4lsgp\") pod \"nova-api-0\" (UID: \"fd1efd4a-2b99-445d-b2af-4f129db94e2c\") " pod="openstack/nova-api-0" Oct 03 13:53:31 crc kubenswrapper[4861]: I1003 13:53:31.312659 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd1efd4a-2b99-445d-b2af-4f129db94e2c-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"fd1efd4a-2b99-445d-b2af-4f129db94e2c\") " pod="openstack/nova-api-0" Oct 03 13:53:31 crc kubenswrapper[4861]: I1003 13:53:31.313012 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fd1efd4a-2b99-445d-b2af-4f129db94e2c-config-data\") pod \"nova-api-0\" (UID: \"fd1efd4a-2b99-445d-b2af-4f129db94e2c\") " pod="openstack/nova-api-0" Oct 03 13:53:31 crc kubenswrapper[4861]: I1003 13:53:31.313131 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fd1efd4a-2b99-445d-b2af-4f129db94e2c-logs\") pod \"nova-api-0\" (UID: \"fd1efd4a-2b99-445d-b2af-4f129db94e2c\") " pod="openstack/nova-api-0" Oct 03 13:53:31 crc kubenswrapper[4861]: I1003 13:53:31.416724 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fd1efd4a-2b99-445d-b2af-4f129db94e2c-config-data\") pod \"nova-api-0\" (UID: \"fd1efd4a-2b99-445d-b2af-4f129db94e2c\") " pod="openstack/nova-api-0" Oct 03 13:53:31 crc kubenswrapper[4861]: I1003 13:53:31.417792 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fd1efd4a-2b99-445d-b2af-4f129db94e2c-logs\") pod \"nova-api-0\" (UID: \"fd1efd4a-2b99-445d-b2af-4f129db94e2c\") " pod="openstack/nova-api-0" Oct 03 13:53:31 crc kubenswrapper[4861]: I1003 13:53:31.418053 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4lsgp\" (UniqueName: \"kubernetes.io/projected/fd1efd4a-2b99-445d-b2af-4f129db94e2c-kube-api-access-4lsgp\") pod \"nova-api-0\" (UID: \"fd1efd4a-2b99-445d-b2af-4f129db94e2c\") " pod="openstack/nova-api-0" Oct 03 13:53:31 crc kubenswrapper[4861]: I1003 13:53:31.418173 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd1efd4a-2b99-445d-b2af-4f129db94e2c-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"fd1efd4a-2b99-445d-b2af-4f129db94e2c\") " pod="openstack/nova-api-0" Oct 03 13:53:31 crc kubenswrapper[4861]: I1003 13:53:31.418280 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fd1efd4a-2b99-445d-b2af-4f129db94e2c-logs\") pod \"nova-api-0\" (UID: \"fd1efd4a-2b99-445d-b2af-4f129db94e2c\") " 
pod="openstack/nova-api-0" Oct 03 13:53:31 crc kubenswrapper[4861]: I1003 13:53:31.423830 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd1efd4a-2b99-445d-b2af-4f129db94e2c-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"fd1efd4a-2b99-445d-b2af-4f129db94e2c\") " pod="openstack/nova-api-0" Oct 03 13:53:31 crc kubenswrapper[4861]: I1003 13:53:31.424023 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fd1efd4a-2b99-445d-b2af-4f129db94e2c-config-data\") pod \"nova-api-0\" (UID: \"fd1efd4a-2b99-445d-b2af-4f129db94e2c\") " pod="openstack/nova-api-0" Oct 03 13:53:31 crc kubenswrapper[4861]: I1003 13:53:31.438966 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4lsgp\" (UniqueName: \"kubernetes.io/projected/fd1efd4a-2b99-445d-b2af-4f129db94e2c-kube-api-access-4lsgp\") pod \"nova-api-0\" (UID: \"fd1efd4a-2b99-445d-b2af-4f129db94e2c\") " pod="openstack/nova-api-0" Oct 03 13:53:31 crc kubenswrapper[4861]: I1003 13:53:31.636658 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 03 13:53:32 crc kubenswrapper[4861]: I1003 13:53:32.074869 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 03 13:53:32 crc kubenswrapper[4861]: I1003 13:53:32.075936 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7139b71d-8825-4675-b435-a0e359d2e5c7" containerName="proxy-httpd" containerID="cri-o://a36f9a92d95e198c1ad4a086290b10d3bc3a798e4eb8b5b3e8a8028a39d578b8" gracePeriod=30 Oct 03 13:53:32 crc kubenswrapper[4861]: I1003 13:53:32.075925 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7139b71d-8825-4675-b435-a0e359d2e5c7" containerName="sg-core" containerID="cri-o://25c808eef4b0e2cc97e5fdd898987674b3a1b7d6f1053980f1e7924f1f7f02ba" gracePeriod=30 Oct 03 13:53:32 crc kubenswrapper[4861]: I1003 13:53:32.075950 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7139b71d-8825-4675-b435-a0e359d2e5c7" containerName="ceilometer-notification-agent" containerID="cri-o://9e9b1c12d03249e86b39ff504f5ad415adc62d5988e71d1461820bd67aeaf81b" gracePeriod=30 Oct 03 13:53:32 crc kubenswrapper[4861]: I1003 13:53:32.075968 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7139b71d-8825-4675-b435-a0e359d2e5c7" containerName="ceilometer-central-agent" containerID="cri-o://f64be3d4711c96b83bdf66a65f2a0777a3612810116e833d3457972fcb5aeb7f" gracePeriod=30 Oct 03 13:53:32 crc kubenswrapper[4861]: I1003 13:53:32.139734 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 03 13:53:32 crc kubenswrapper[4861]: W1003 13:53:32.153425 4861 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfd1efd4a_2b99_445d_b2af_4f129db94e2c.slice/crio-5afb97f90dd81a3a504b324a7bd76f33b871125b5d35582dcbc36ae0f1d7da15 WatchSource:0}: Error finding container 5afb97f90dd81a3a504b324a7bd76f33b871125b5d35582dcbc36ae0f1d7da15: Status 404 returned error can't find the container with id 5afb97f90dd81a3a504b324a7bd76f33b871125b5d35582dcbc36ae0f1d7da15 Oct 03 13:53:32 crc kubenswrapper[4861]: I1003 13:53:32.181833 4861 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"fd1efd4a-2b99-445d-b2af-4f129db94e2c","Type":"ContainerStarted","Data":"5afb97f90dd81a3a504b324a7bd76f33b871125b5d35582dcbc36ae0f1d7da15"} Oct 03 13:53:32 crc kubenswrapper[4861]: I1003 13:53:32.183405 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"ccb19566-aa49-4551-9fbf-a05affdd60e2","Type":"ContainerStarted","Data":"ece4277a0421f040e33859b54e5cd34513ad22539eb03ea0706f94529b8bc246"} Oct 03 13:53:32 crc kubenswrapper[4861]: I1003 13:53:32.183522 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"ccb19566-aa49-4551-9fbf-a05affdd60e2","Type":"ContainerStarted","Data":"07ad471be2ac77bf95ca9f332fb53f829ef39663e1d4ea02ac76a250da5cba35"} Oct 03 13:53:32 crc kubenswrapper[4861]: I1003 13:53:32.183617 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Oct 03 13:53:32 crc kubenswrapper[4861]: I1003 13:53:32.224696 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=1.770408049 podStartE2EDuration="2.224677663s" podCreationTimestamp="2025-10-03 13:53:30 +0000 UTC" firstStartedPulling="2025-10-03 13:53:31.215490062 +0000 UTC m=+1325.213475109" lastFinishedPulling="2025-10-03 13:53:31.669759676 +0000 UTC m=+1325.667744723" observedRunningTime="2025-10-03 13:53:32.203733247 +0000 UTC m=+1326.201718294" watchObservedRunningTime="2025-10-03 13:53:32.224677663 +0000 UTC m=+1326.222662710" Oct 03 13:53:32 crc kubenswrapper[4861]: I1003 13:53:32.692425 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="47fbe704-46a5-4313-ba09-06f613943f0a" path="/var/lib/kubelet/pods/47fbe704-46a5-4313-ba09-06f613943f0a/volumes" Oct 03 13:53:33 crc kubenswrapper[4861]: I1003 13:53:33.197666 4861 generic.go:334] "Generic (PLEG): container finished" podID="7139b71d-8825-4675-b435-a0e359d2e5c7" containerID="a36f9a92d95e198c1ad4a086290b10d3bc3a798e4eb8b5b3e8a8028a39d578b8" exitCode=0 Oct 03 13:53:33 crc kubenswrapper[4861]: I1003 13:53:33.197696 4861 generic.go:334] "Generic (PLEG): container finished" podID="7139b71d-8825-4675-b435-a0e359d2e5c7" containerID="25c808eef4b0e2cc97e5fdd898987674b3a1b7d6f1053980f1e7924f1f7f02ba" exitCode=2 Oct 03 13:53:33 crc kubenswrapper[4861]: I1003 13:53:33.197703 4861 generic.go:334] "Generic (PLEG): container finished" podID="7139b71d-8825-4675-b435-a0e359d2e5c7" containerID="f64be3d4711c96b83bdf66a65f2a0777a3612810116e833d3457972fcb5aeb7f" exitCode=0 Oct 03 13:53:33 crc kubenswrapper[4861]: I1003 13:53:33.197748 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7139b71d-8825-4675-b435-a0e359d2e5c7","Type":"ContainerDied","Data":"a36f9a92d95e198c1ad4a086290b10d3bc3a798e4eb8b5b3e8a8028a39d578b8"} Oct 03 13:53:33 crc kubenswrapper[4861]: I1003 13:53:33.197778 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7139b71d-8825-4675-b435-a0e359d2e5c7","Type":"ContainerDied","Data":"25c808eef4b0e2cc97e5fdd898987674b3a1b7d6f1053980f1e7924f1f7f02ba"} Oct 03 13:53:33 crc kubenswrapper[4861]: I1003 13:53:33.197790 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7139b71d-8825-4675-b435-a0e359d2e5c7","Type":"ContainerDied","Data":"f64be3d4711c96b83bdf66a65f2a0777a3612810116e833d3457972fcb5aeb7f"} Oct 03 13:53:33 
crc kubenswrapper[4861]: I1003 13:53:33.200986 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"fd1efd4a-2b99-445d-b2af-4f129db94e2c","Type":"ContainerStarted","Data":"e1033a5fff1669f552d4cb281b5a5de1171e402d8113b0569668ff3243b6054c"} Oct 03 13:53:33 crc kubenswrapper[4861]: I1003 13:53:33.201021 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"fd1efd4a-2b99-445d-b2af-4f129db94e2c","Type":"ContainerStarted","Data":"0c7f51cb05fba6f4c3a58ad9231351ac250b63d4b77917f495fd5b86870dda99"} Oct 03 13:53:33 crc kubenswrapper[4861]: I1003 13:53:33.230602 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.230583127 podStartE2EDuration="2.230583127s" podCreationTimestamp="2025-10-03 13:53:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:53:33.224576997 +0000 UTC m=+1327.222562044" watchObservedRunningTime="2025-10-03 13:53:33.230583127 +0000 UTC m=+1327.228568184" Oct 03 13:53:33 crc kubenswrapper[4861]: I1003 13:53:33.576824 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 03 13:53:33 crc kubenswrapper[4861]: I1003 13:53:33.672340 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m8jtx\" (UniqueName: \"kubernetes.io/projected/7139b71d-8825-4675-b435-a0e359d2e5c7-kube-api-access-m8jtx\") pod \"7139b71d-8825-4675-b435-a0e359d2e5c7\" (UID: \"7139b71d-8825-4675-b435-a0e359d2e5c7\") " Oct 03 13:53:33 crc kubenswrapper[4861]: I1003 13:53:33.672453 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7139b71d-8825-4675-b435-a0e359d2e5c7-run-httpd\") pod \"7139b71d-8825-4675-b435-a0e359d2e5c7\" (UID: \"7139b71d-8825-4675-b435-a0e359d2e5c7\") " Oct 03 13:53:33 crc kubenswrapper[4861]: I1003 13:53:33.672530 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7139b71d-8825-4675-b435-a0e359d2e5c7-scripts\") pod \"7139b71d-8825-4675-b435-a0e359d2e5c7\" (UID: \"7139b71d-8825-4675-b435-a0e359d2e5c7\") " Oct 03 13:53:33 crc kubenswrapper[4861]: I1003 13:53:33.672783 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7139b71d-8825-4675-b435-a0e359d2e5c7-log-httpd\") pod \"7139b71d-8825-4675-b435-a0e359d2e5c7\" (UID: \"7139b71d-8825-4675-b435-a0e359d2e5c7\") " Oct 03 13:53:33 crc kubenswrapper[4861]: I1003 13:53:33.672814 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7139b71d-8825-4675-b435-a0e359d2e5c7-combined-ca-bundle\") pod \"7139b71d-8825-4675-b435-a0e359d2e5c7\" (UID: \"7139b71d-8825-4675-b435-a0e359d2e5c7\") " Oct 03 13:53:33 crc kubenswrapper[4861]: I1003 13:53:33.672875 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7139b71d-8825-4675-b435-a0e359d2e5c7-config-data\") pod \"7139b71d-8825-4675-b435-a0e359d2e5c7\" (UID: \"7139b71d-8825-4675-b435-a0e359d2e5c7\") " Oct 03 13:53:33 crc kubenswrapper[4861]: I1003 13:53:33.672957 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7139b71d-8825-4675-b435-a0e359d2e5c7-sg-core-conf-yaml\") pod \"7139b71d-8825-4675-b435-a0e359d2e5c7\" (UID: \"7139b71d-8825-4675-b435-a0e359d2e5c7\") " Oct 03 13:53:33 crc kubenswrapper[4861]: I1003 13:53:33.674856 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7139b71d-8825-4675-b435-a0e359d2e5c7-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "7139b71d-8825-4675-b435-a0e359d2e5c7" (UID: "7139b71d-8825-4675-b435-a0e359d2e5c7"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:53:33 crc kubenswrapper[4861]: I1003 13:53:33.675384 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7139b71d-8825-4675-b435-a0e359d2e5c7-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "7139b71d-8825-4675-b435-a0e359d2e5c7" (UID: "7139b71d-8825-4675-b435-a0e359d2e5c7"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:53:33 crc kubenswrapper[4861]: I1003 13:53:33.679048 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7139b71d-8825-4675-b435-a0e359d2e5c7-scripts" (OuterVolumeSpecName: "scripts") pod "7139b71d-8825-4675-b435-a0e359d2e5c7" (UID: "7139b71d-8825-4675-b435-a0e359d2e5c7"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:53:33 crc kubenswrapper[4861]: I1003 13:53:33.697445 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7139b71d-8825-4675-b435-a0e359d2e5c7-kube-api-access-m8jtx" (OuterVolumeSpecName: "kube-api-access-m8jtx") pod "7139b71d-8825-4675-b435-a0e359d2e5c7" (UID: "7139b71d-8825-4675-b435-a0e359d2e5c7"). InnerVolumeSpecName "kube-api-access-m8jtx". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:53:33 crc kubenswrapper[4861]: I1003 13:53:33.757905 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7139b71d-8825-4675-b435-a0e359d2e5c7-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "7139b71d-8825-4675-b435-a0e359d2e5c7" (UID: "7139b71d-8825-4675-b435-a0e359d2e5c7"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:53:33 crc kubenswrapper[4861]: I1003 13:53:33.775631 4861 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7139b71d-8825-4675-b435-a0e359d2e5c7-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 13:53:33 crc kubenswrapper[4861]: I1003 13:53:33.775663 4861 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7139b71d-8825-4675-b435-a0e359d2e5c7-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 03 13:53:33 crc kubenswrapper[4861]: I1003 13:53:33.775674 4861 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7139b71d-8825-4675-b435-a0e359d2e5c7-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 03 13:53:33 crc kubenswrapper[4861]: I1003 13:53:33.775683 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m8jtx\" (UniqueName: \"kubernetes.io/projected/7139b71d-8825-4675-b435-a0e359d2e5c7-kube-api-access-m8jtx\") on node \"crc\" DevicePath \"\"" Oct 03 13:53:33 crc kubenswrapper[4861]: I1003 13:53:33.775692 4861 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7139b71d-8825-4675-b435-a0e359d2e5c7-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 03 13:53:33 crc kubenswrapper[4861]: I1003 13:53:33.787724 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7139b71d-8825-4675-b435-a0e359d2e5c7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7139b71d-8825-4675-b435-a0e359d2e5c7" (UID: "7139b71d-8825-4675-b435-a0e359d2e5c7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:53:33 crc kubenswrapper[4861]: I1003 13:53:33.805572 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7139b71d-8825-4675-b435-a0e359d2e5c7-config-data" (OuterVolumeSpecName: "config-data") pod "7139b71d-8825-4675-b435-a0e359d2e5c7" (UID: "7139b71d-8825-4675-b435-a0e359d2e5c7"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:53:33 crc kubenswrapper[4861]: I1003 13:53:33.876614 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 03 13:53:33 crc kubenswrapper[4861]: I1003 13:53:33.876960 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 03 13:53:33 crc kubenswrapper[4861]: I1003 13:53:33.878351 4861 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7139b71d-8825-4675-b435-a0e359d2e5c7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 13:53:33 crc kubenswrapper[4861]: I1003 13:53:33.878371 4861 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7139b71d-8825-4675-b435-a0e359d2e5c7-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 13:53:34 crc kubenswrapper[4861]: I1003 13:53:34.210610 4861 generic.go:334] "Generic (PLEG): container finished" podID="7139b71d-8825-4675-b435-a0e359d2e5c7" containerID="9e9b1c12d03249e86b39ff504f5ad415adc62d5988e71d1461820bd67aeaf81b" exitCode=0 Oct 03 13:53:34 crc kubenswrapper[4861]: I1003 13:53:34.210696 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 03 13:53:34 crc kubenswrapper[4861]: I1003 13:53:34.210730 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7139b71d-8825-4675-b435-a0e359d2e5c7","Type":"ContainerDied","Data":"9e9b1c12d03249e86b39ff504f5ad415adc62d5988e71d1461820bd67aeaf81b"} Oct 03 13:53:34 crc kubenswrapper[4861]: I1003 13:53:34.210774 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7139b71d-8825-4675-b435-a0e359d2e5c7","Type":"ContainerDied","Data":"1a44b53eaa922545b99a6b5b819c254813f1ccd65327305b03b51f2703757e22"} Oct 03 13:53:34 crc kubenswrapper[4861]: I1003 13:53:34.210794 4861 scope.go:117] "RemoveContainer" containerID="a36f9a92d95e198c1ad4a086290b10d3bc3a798e4eb8b5b3e8a8028a39d578b8" Oct 03 13:53:34 crc kubenswrapper[4861]: I1003 13:53:34.232943 4861 scope.go:117] "RemoveContainer" containerID="25c808eef4b0e2cc97e5fdd898987674b3a1b7d6f1053980f1e7924f1f7f02ba" Oct 03 13:53:34 crc kubenswrapper[4861]: I1003 13:53:34.252576 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 03 13:53:34 crc kubenswrapper[4861]: I1003 13:53:34.287968 4861 scope.go:117] "RemoveContainer" containerID="9e9b1c12d03249e86b39ff504f5ad415adc62d5988e71d1461820bd67aeaf81b" Oct 03 13:53:34 crc kubenswrapper[4861]: I1003 13:53:34.289447 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 03 13:53:34 crc kubenswrapper[4861]: I1003 13:53:34.303279 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 03 13:53:34 crc kubenswrapper[4861]: E1003 13:53:34.303960 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7139b71d-8825-4675-b435-a0e359d2e5c7" containerName="ceilometer-notification-agent" Oct 03 13:53:34 crc kubenswrapper[4861]: I1003 13:53:34.304064 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="7139b71d-8825-4675-b435-a0e359d2e5c7" containerName="ceilometer-notification-agent" Oct 03 13:53:34 crc kubenswrapper[4861]: E1003 13:53:34.304155 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7139b71d-8825-4675-b435-a0e359d2e5c7" containerName="ceilometer-central-agent" Oct 03 13:53:34 crc kubenswrapper[4861]: I1003 13:53:34.304246 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="7139b71d-8825-4675-b435-a0e359d2e5c7" containerName="ceilometer-central-agent" Oct 03 13:53:34 crc kubenswrapper[4861]: E1003 13:53:34.304346 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7139b71d-8825-4675-b435-a0e359d2e5c7" containerName="proxy-httpd" Oct 03 13:53:34 crc kubenswrapper[4861]: I1003 13:53:34.304429 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="7139b71d-8825-4675-b435-a0e359d2e5c7" containerName="proxy-httpd" Oct 03 13:53:34 crc kubenswrapper[4861]: E1003 13:53:34.304514 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7139b71d-8825-4675-b435-a0e359d2e5c7" containerName="sg-core" Oct 03 13:53:34 crc kubenswrapper[4861]: I1003 13:53:34.304586 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="7139b71d-8825-4675-b435-a0e359d2e5c7" containerName="sg-core" Oct 03 13:53:34 crc kubenswrapper[4861]: I1003 13:53:34.304859 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="7139b71d-8825-4675-b435-a0e359d2e5c7" containerName="sg-core" Oct 03 13:53:34 crc kubenswrapper[4861]: I1003 13:53:34.304949 4861 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="7139b71d-8825-4675-b435-a0e359d2e5c7" containerName="proxy-httpd" Oct 03 13:53:34 crc kubenswrapper[4861]: I1003 13:53:34.305040 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="7139b71d-8825-4675-b435-a0e359d2e5c7" containerName="ceilometer-notification-agent" Oct 03 13:53:34 crc kubenswrapper[4861]: I1003 13:53:34.305118 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="7139b71d-8825-4675-b435-a0e359d2e5c7" containerName="ceilometer-central-agent" Oct 03 13:53:34 crc kubenswrapper[4861]: I1003 13:53:34.307209 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 03 13:53:34 crc kubenswrapper[4861]: I1003 13:53:34.315461 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 03 13:53:34 crc kubenswrapper[4861]: I1003 13:53:34.317439 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 03 13:53:34 crc kubenswrapper[4861]: I1003 13:53:34.317745 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Oct 03 13:53:34 crc kubenswrapper[4861]: I1003 13:53:34.317991 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 03 13:53:34 crc kubenswrapper[4861]: I1003 13:53:34.353820 4861 scope.go:117] "RemoveContainer" containerID="f64be3d4711c96b83bdf66a65f2a0777a3612810116e833d3457972fcb5aeb7f" Oct 03 13:53:34 crc kubenswrapper[4861]: I1003 13:53:34.374381 4861 scope.go:117] "RemoveContainer" containerID="a36f9a92d95e198c1ad4a086290b10d3bc3a798e4eb8b5b3e8a8028a39d578b8" Oct 03 13:53:34 crc kubenswrapper[4861]: E1003 13:53:34.374813 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a36f9a92d95e198c1ad4a086290b10d3bc3a798e4eb8b5b3e8a8028a39d578b8\": container with ID starting with a36f9a92d95e198c1ad4a086290b10d3bc3a798e4eb8b5b3e8a8028a39d578b8 not found: ID does not exist" containerID="a36f9a92d95e198c1ad4a086290b10d3bc3a798e4eb8b5b3e8a8028a39d578b8" Oct 03 13:53:34 crc kubenswrapper[4861]: I1003 13:53:34.374867 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a36f9a92d95e198c1ad4a086290b10d3bc3a798e4eb8b5b3e8a8028a39d578b8"} err="failed to get container status \"a36f9a92d95e198c1ad4a086290b10d3bc3a798e4eb8b5b3e8a8028a39d578b8\": rpc error: code = NotFound desc = could not find container \"a36f9a92d95e198c1ad4a086290b10d3bc3a798e4eb8b5b3e8a8028a39d578b8\": container with ID starting with a36f9a92d95e198c1ad4a086290b10d3bc3a798e4eb8b5b3e8a8028a39d578b8 not found: ID does not exist" Oct 03 13:53:34 crc kubenswrapper[4861]: I1003 13:53:34.374892 4861 scope.go:117] "RemoveContainer" containerID="25c808eef4b0e2cc97e5fdd898987674b3a1b7d6f1053980f1e7924f1f7f02ba" Oct 03 13:53:34 crc kubenswrapper[4861]: E1003 13:53:34.375183 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"25c808eef4b0e2cc97e5fdd898987674b3a1b7d6f1053980f1e7924f1f7f02ba\": container with ID starting with 25c808eef4b0e2cc97e5fdd898987674b3a1b7d6f1053980f1e7924f1f7f02ba not found: ID does not exist" containerID="25c808eef4b0e2cc97e5fdd898987674b3a1b7d6f1053980f1e7924f1f7f02ba" Oct 03 13:53:34 crc kubenswrapper[4861]: I1003 13:53:34.375205 4861 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"25c808eef4b0e2cc97e5fdd898987674b3a1b7d6f1053980f1e7924f1f7f02ba"} err="failed to get container status \"25c808eef4b0e2cc97e5fdd898987674b3a1b7d6f1053980f1e7924f1f7f02ba\": rpc error: code = NotFound desc = could not find container \"25c808eef4b0e2cc97e5fdd898987674b3a1b7d6f1053980f1e7924f1f7f02ba\": container with ID starting with 25c808eef4b0e2cc97e5fdd898987674b3a1b7d6f1053980f1e7924f1f7f02ba not found: ID does not exist" Oct 03 13:53:34 crc kubenswrapper[4861]: I1003 13:53:34.375224 4861 scope.go:117] "RemoveContainer" containerID="9e9b1c12d03249e86b39ff504f5ad415adc62d5988e71d1461820bd67aeaf81b" Oct 03 13:53:34 crc kubenswrapper[4861]: E1003 13:53:34.375506 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9e9b1c12d03249e86b39ff504f5ad415adc62d5988e71d1461820bd67aeaf81b\": container with ID starting with 9e9b1c12d03249e86b39ff504f5ad415adc62d5988e71d1461820bd67aeaf81b not found: ID does not exist" containerID="9e9b1c12d03249e86b39ff504f5ad415adc62d5988e71d1461820bd67aeaf81b" Oct 03 13:53:34 crc kubenswrapper[4861]: I1003 13:53:34.375530 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9e9b1c12d03249e86b39ff504f5ad415adc62d5988e71d1461820bd67aeaf81b"} err="failed to get container status \"9e9b1c12d03249e86b39ff504f5ad415adc62d5988e71d1461820bd67aeaf81b\": rpc error: code = NotFound desc = could not find container \"9e9b1c12d03249e86b39ff504f5ad415adc62d5988e71d1461820bd67aeaf81b\": container with ID starting with 9e9b1c12d03249e86b39ff504f5ad415adc62d5988e71d1461820bd67aeaf81b not found: ID does not exist" Oct 03 13:53:34 crc kubenswrapper[4861]: I1003 13:53:34.375546 4861 scope.go:117] "RemoveContainer" containerID="f64be3d4711c96b83bdf66a65f2a0777a3612810116e833d3457972fcb5aeb7f" Oct 03 13:53:34 crc kubenswrapper[4861]: E1003 13:53:34.375816 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f64be3d4711c96b83bdf66a65f2a0777a3612810116e833d3457972fcb5aeb7f\": container with ID starting with f64be3d4711c96b83bdf66a65f2a0777a3612810116e833d3457972fcb5aeb7f not found: ID does not exist" containerID="f64be3d4711c96b83bdf66a65f2a0777a3612810116e833d3457972fcb5aeb7f" Oct 03 13:53:34 crc kubenswrapper[4861]: I1003 13:53:34.375840 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f64be3d4711c96b83bdf66a65f2a0777a3612810116e833d3457972fcb5aeb7f"} err="failed to get container status \"f64be3d4711c96b83bdf66a65f2a0777a3612810116e833d3457972fcb5aeb7f\": rpc error: code = NotFound desc = could not find container \"f64be3d4711c96b83bdf66a65f2a0777a3612810116e833d3457972fcb5aeb7f\": container with ID starting with f64be3d4711c96b83bdf66a65f2a0777a3612810116e833d3457972fcb5aeb7f not found: ID does not exist" Oct 03 13:53:34 crc kubenswrapper[4861]: I1003 13:53:34.386783 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/2d8414db-04a2-4f58-a99f-73d7fd445d5d-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"2d8414db-04a2-4f58-a99f-73d7fd445d5d\") " pod="openstack/ceilometer-0" Oct 03 13:53:34 crc kubenswrapper[4861]: I1003 13:53:34.386916 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/2d8414db-04a2-4f58-a99f-73d7fd445d5d-run-httpd\") pod \"ceilometer-0\" (UID: \"2d8414db-04a2-4f58-a99f-73d7fd445d5d\") " pod="openstack/ceilometer-0" Oct 03 13:53:34 crc kubenswrapper[4861]: I1003 13:53:34.386944 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2d8414db-04a2-4f58-a99f-73d7fd445d5d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"2d8414db-04a2-4f58-a99f-73d7fd445d5d\") " pod="openstack/ceilometer-0" Oct 03 13:53:34 crc kubenswrapper[4861]: I1003 13:53:34.386971 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d8414db-04a2-4f58-a99f-73d7fd445d5d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"2d8414db-04a2-4f58-a99f-73d7fd445d5d\") " pod="openstack/ceilometer-0" Oct 03 13:53:34 crc kubenswrapper[4861]: I1003 13:53:34.387013 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2d8414db-04a2-4f58-a99f-73d7fd445d5d-config-data\") pod \"ceilometer-0\" (UID: \"2d8414db-04a2-4f58-a99f-73d7fd445d5d\") " pod="openstack/ceilometer-0" Oct 03 13:53:34 crc kubenswrapper[4861]: I1003 13:53:34.387057 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2d8414db-04a2-4f58-a99f-73d7fd445d5d-scripts\") pod \"ceilometer-0\" (UID: \"2d8414db-04a2-4f58-a99f-73d7fd445d5d\") " pod="openstack/ceilometer-0" Oct 03 13:53:34 crc kubenswrapper[4861]: I1003 13:53:34.387079 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2d8414db-04a2-4f58-a99f-73d7fd445d5d-log-httpd\") pod \"ceilometer-0\" (UID: \"2d8414db-04a2-4f58-a99f-73d7fd445d5d\") " pod="openstack/ceilometer-0" Oct 03 13:53:34 crc kubenswrapper[4861]: I1003 13:53:34.387133 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jn65r\" (UniqueName: \"kubernetes.io/projected/2d8414db-04a2-4f58-a99f-73d7fd445d5d-kube-api-access-jn65r\") pod \"ceilometer-0\" (UID: \"2d8414db-04a2-4f58-a99f-73d7fd445d5d\") " pod="openstack/ceilometer-0" Oct 03 13:53:34 crc kubenswrapper[4861]: I1003 13:53:34.489326 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2d8414db-04a2-4f58-a99f-73d7fd445d5d-config-data\") pod \"ceilometer-0\" (UID: \"2d8414db-04a2-4f58-a99f-73d7fd445d5d\") " pod="openstack/ceilometer-0" Oct 03 13:53:34 crc kubenswrapper[4861]: I1003 13:53:34.489423 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2d8414db-04a2-4f58-a99f-73d7fd445d5d-log-httpd\") pod \"ceilometer-0\" (UID: \"2d8414db-04a2-4f58-a99f-73d7fd445d5d\") " pod="openstack/ceilometer-0" Oct 03 13:53:34 crc kubenswrapper[4861]: I1003 13:53:34.489460 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2d8414db-04a2-4f58-a99f-73d7fd445d5d-scripts\") pod \"ceilometer-0\" (UID: \"2d8414db-04a2-4f58-a99f-73d7fd445d5d\") " pod="openstack/ceilometer-0" Oct 03 13:53:34 crc kubenswrapper[4861]: I1003 13:53:34.489601 4861 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-jn65r\" (UniqueName: \"kubernetes.io/projected/2d8414db-04a2-4f58-a99f-73d7fd445d5d-kube-api-access-jn65r\") pod \"ceilometer-0\" (UID: \"2d8414db-04a2-4f58-a99f-73d7fd445d5d\") " pod="openstack/ceilometer-0" Oct 03 13:53:34 crc kubenswrapper[4861]: I1003 13:53:34.489693 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/2d8414db-04a2-4f58-a99f-73d7fd445d5d-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"2d8414db-04a2-4f58-a99f-73d7fd445d5d\") " pod="openstack/ceilometer-0" Oct 03 13:53:34 crc kubenswrapper[4861]: I1003 13:53:34.489828 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2d8414db-04a2-4f58-a99f-73d7fd445d5d-run-httpd\") pod \"ceilometer-0\" (UID: \"2d8414db-04a2-4f58-a99f-73d7fd445d5d\") " pod="openstack/ceilometer-0" Oct 03 13:53:34 crc kubenswrapper[4861]: I1003 13:53:34.489883 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2d8414db-04a2-4f58-a99f-73d7fd445d5d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"2d8414db-04a2-4f58-a99f-73d7fd445d5d\") " pod="openstack/ceilometer-0" Oct 03 13:53:34 crc kubenswrapper[4861]: I1003 13:53:34.489935 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d8414db-04a2-4f58-a99f-73d7fd445d5d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"2d8414db-04a2-4f58-a99f-73d7fd445d5d\") " pod="openstack/ceilometer-0" Oct 03 13:53:34 crc kubenswrapper[4861]: I1003 13:53:34.492013 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2d8414db-04a2-4f58-a99f-73d7fd445d5d-run-httpd\") pod \"ceilometer-0\" (UID: \"2d8414db-04a2-4f58-a99f-73d7fd445d5d\") " pod="openstack/ceilometer-0" Oct 03 13:53:34 crc kubenswrapper[4861]: I1003 13:53:34.492485 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2d8414db-04a2-4f58-a99f-73d7fd445d5d-log-httpd\") pod \"ceilometer-0\" (UID: \"2d8414db-04a2-4f58-a99f-73d7fd445d5d\") " pod="openstack/ceilometer-0" Oct 03 13:53:34 crc kubenswrapper[4861]: I1003 13:53:34.503846 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2d8414db-04a2-4f58-a99f-73d7fd445d5d-scripts\") pod \"ceilometer-0\" (UID: \"2d8414db-04a2-4f58-a99f-73d7fd445d5d\") " pod="openstack/ceilometer-0" Oct 03 13:53:34 crc kubenswrapper[4861]: I1003 13:53:34.504045 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d8414db-04a2-4f58-a99f-73d7fd445d5d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"2d8414db-04a2-4f58-a99f-73d7fd445d5d\") " pod="openstack/ceilometer-0" Oct 03 13:53:34 crc kubenswrapper[4861]: I1003 13:53:34.504524 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/2d8414db-04a2-4f58-a99f-73d7fd445d5d-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"2d8414db-04a2-4f58-a99f-73d7fd445d5d\") " pod="openstack/ceilometer-0" Oct 03 13:53:34 crc kubenswrapper[4861]: I1003 13:53:34.504680 4861 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2d8414db-04a2-4f58-a99f-73d7fd445d5d-config-data\") pod \"ceilometer-0\" (UID: \"2d8414db-04a2-4f58-a99f-73d7fd445d5d\") " pod="openstack/ceilometer-0" Oct 03 13:53:34 crc kubenswrapper[4861]: I1003 13:53:34.505044 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2d8414db-04a2-4f58-a99f-73d7fd445d5d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"2d8414db-04a2-4f58-a99f-73d7fd445d5d\") " pod="openstack/ceilometer-0" Oct 03 13:53:34 crc kubenswrapper[4861]: I1003 13:53:34.509178 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jn65r\" (UniqueName: \"kubernetes.io/projected/2d8414db-04a2-4f58-a99f-73d7fd445d5d-kube-api-access-jn65r\") pod \"ceilometer-0\" (UID: \"2d8414db-04a2-4f58-a99f-73d7fd445d5d\") " pod="openstack/ceilometer-0" Oct 03 13:53:34 crc kubenswrapper[4861]: I1003 13:53:34.647157 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 03 13:53:34 crc kubenswrapper[4861]: I1003 13:53:34.692206 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7139b71d-8825-4675-b435-a0e359d2e5c7" path="/var/lib/kubelet/pods/7139b71d-8825-4675-b435-a0e359d2e5c7/volumes" Oct 03 13:53:34 crc kubenswrapper[4861]: I1003 13:53:34.697700 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Oct 03 13:53:35 crc kubenswrapper[4861]: I1003 13:53:35.084672 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 03 13:53:35 crc kubenswrapper[4861]: W1003 13:53:35.088064 4861 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2d8414db_04a2_4f58_a99f_73d7fd445d5d.slice/crio-96539ce71b141c97ab7b340b04324f6f2f9871a73033d0f9de08484f40359862 WatchSource:0}: Error finding container 96539ce71b141c97ab7b340b04324f6f2f9871a73033d0f9de08484f40359862: Status 404 returned error can't find the container with id 96539ce71b141c97ab7b340b04324f6f2f9871a73033d0f9de08484f40359862 Oct 03 13:53:35 crc kubenswrapper[4861]: I1003 13:53:35.228734 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2d8414db-04a2-4f58-a99f-73d7fd445d5d","Type":"ContainerStarted","Data":"96539ce71b141c97ab7b340b04324f6f2f9871a73033d0f9de08484f40359862"} Oct 03 13:53:36 crc kubenswrapper[4861]: I1003 13:53:36.238988 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2d8414db-04a2-4f58-a99f-73d7fd445d5d","Type":"ContainerStarted","Data":"6c6e2618aefcb90000bd7da57f3c663d9967d233c22ca7d05a0ecc361b025823"} Oct 03 13:53:37 crc kubenswrapper[4861]: I1003 13:53:37.252188 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2d8414db-04a2-4f58-a99f-73d7fd445d5d","Type":"ContainerStarted","Data":"aa38e6bbdb33ec4d49e4e193b938c62b2387241609e7cb127c8b8125bf0fc584"} Oct 03 13:53:38 crc kubenswrapper[4861]: I1003 13:53:38.265301 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2d8414db-04a2-4f58-a99f-73d7fd445d5d","Type":"ContainerStarted","Data":"c7e090cfe01cc2ed04c1284306e47dc6e04e7f1382d99cb6eac04cdfa6255faf"} Oct 03 13:53:38 crc kubenswrapper[4861]: I1003 13:53:38.479263 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openstack/nova-cell1-conductor-0" Oct 03 13:53:38 crc kubenswrapper[4861]: I1003 13:53:38.877994 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Oct 03 13:53:38 crc kubenswrapper[4861]: I1003 13:53:38.878282 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Oct 03 13:53:39 crc kubenswrapper[4861]: I1003 13:53:39.275970 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2d8414db-04a2-4f58-a99f-73d7fd445d5d","Type":"ContainerStarted","Data":"49b9fa23272a671e8c4703d4b40f35e6df65a441caa953043f3d8699b2a5ae39"} Oct 03 13:53:39 crc kubenswrapper[4861]: I1003 13:53:39.276977 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 03 13:53:39 crc kubenswrapper[4861]: I1003 13:53:39.301065 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.739093516 podStartE2EDuration="5.30105163s" podCreationTimestamp="2025-10-03 13:53:34 +0000 UTC" firstStartedPulling="2025-10-03 13:53:35.091531381 +0000 UTC m=+1329.089516428" lastFinishedPulling="2025-10-03 13:53:38.653489495 +0000 UTC m=+1332.651474542" observedRunningTime="2025-10-03 13:53:39.294357142 +0000 UTC m=+1333.292342189" watchObservedRunningTime="2025-10-03 13:53:39.30105163 +0000 UTC m=+1333.299036677" Oct 03 13:53:39 crc kubenswrapper[4861]: I1003 13:53:39.697087 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Oct 03 13:53:39 crc kubenswrapper[4861]: I1003 13:53:39.737295 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Oct 03 13:53:39 crc kubenswrapper[4861]: I1003 13:53:39.892369 4861 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="02a413a1-1260-4c95-8513-c3266a085870" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.191:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 03 13:53:39 crc kubenswrapper[4861]: I1003 13:53:39.892624 4861 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="02a413a1-1260-4c95-8513-c3266a085870" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.191:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 03 13:53:40 crc kubenswrapper[4861]: I1003 13:53:40.316967 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Oct 03 13:53:40 crc kubenswrapper[4861]: I1003 13:53:40.640152 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Oct 03 13:53:41 crc kubenswrapper[4861]: I1003 13:53:41.637279 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 03 13:53:41 crc kubenswrapper[4861]: I1003 13:53:41.637538 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 03 13:53:42 crc kubenswrapper[4861]: I1003 13:53:42.720538 4861 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="fd1efd4a-2b99-445d-b2af-4f129db94e2c" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.194:8774/\": context deadline exceeded (Client.Timeout exceeded 
while awaiting headers)" Oct 03 13:53:42 crc kubenswrapper[4861]: I1003 13:53:42.720678 4861 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="fd1efd4a-2b99-445d-b2af-4f129db94e2c" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.194:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 03 13:53:48 crc kubenswrapper[4861]: I1003 13:53:48.883255 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Oct 03 13:53:48 crc kubenswrapper[4861]: I1003 13:53:48.883859 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Oct 03 13:53:48 crc kubenswrapper[4861]: I1003 13:53:48.891566 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Oct 03 13:53:48 crc kubenswrapper[4861]: I1003 13:53:48.891900 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Oct 03 13:53:50 crc kubenswrapper[4861]: I1003 13:53:50.165849 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 03 13:53:50 crc kubenswrapper[4861]: I1003 13:53:50.191799 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/344b366c-9eb6-4732-be14-9d20dfd27336-combined-ca-bundle\") pod \"344b366c-9eb6-4732-be14-9d20dfd27336\" (UID: \"344b366c-9eb6-4732-be14-9d20dfd27336\") " Oct 03 13:53:50 crc kubenswrapper[4861]: I1003 13:53:50.192732 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/344b366c-9eb6-4732-be14-9d20dfd27336-config-data\") pod \"344b366c-9eb6-4732-be14-9d20dfd27336\" (UID: \"344b366c-9eb6-4732-be14-9d20dfd27336\") " Oct 03 13:53:50 crc kubenswrapper[4861]: I1003 13:53:50.192796 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xd2ml\" (UniqueName: \"kubernetes.io/projected/344b366c-9eb6-4732-be14-9d20dfd27336-kube-api-access-xd2ml\") pod \"344b366c-9eb6-4732-be14-9d20dfd27336\" (UID: \"344b366c-9eb6-4732-be14-9d20dfd27336\") " Oct 03 13:53:50 crc kubenswrapper[4861]: I1003 13:53:50.199520 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/344b366c-9eb6-4732-be14-9d20dfd27336-kube-api-access-xd2ml" (OuterVolumeSpecName: "kube-api-access-xd2ml") pod "344b366c-9eb6-4732-be14-9d20dfd27336" (UID: "344b366c-9eb6-4732-be14-9d20dfd27336"). InnerVolumeSpecName "kube-api-access-xd2ml". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:53:50 crc kubenswrapper[4861]: I1003 13:53:50.227523 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/344b366c-9eb6-4732-be14-9d20dfd27336-config-data" (OuterVolumeSpecName: "config-data") pod "344b366c-9eb6-4732-be14-9d20dfd27336" (UID: "344b366c-9eb6-4732-be14-9d20dfd27336"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:53:50 crc kubenswrapper[4861]: I1003 13:53:50.229106 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/344b366c-9eb6-4732-be14-9d20dfd27336-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "344b366c-9eb6-4732-be14-9d20dfd27336" (UID: "344b366c-9eb6-4732-be14-9d20dfd27336"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:53:50 crc kubenswrapper[4861]: I1003 13:53:50.295909 4861 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/344b366c-9eb6-4732-be14-9d20dfd27336-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 13:53:50 crc kubenswrapper[4861]: I1003 13:53:50.295972 4861 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/344b366c-9eb6-4732-be14-9d20dfd27336-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 13:53:50 crc kubenswrapper[4861]: I1003 13:53:50.296014 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xd2ml\" (UniqueName: \"kubernetes.io/projected/344b366c-9eb6-4732-be14-9d20dfd27336-kube-api-access-xd2ml\") on node \"crc\" DevicePath \"\"" Oct 03 13:53:50 crc kubenswrapper[4861]: I1003 13:53:50.372331 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 03 13:53:50 crc kubenswrapper[4861]: I1003 13:53:50.372366 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"344b366c-9eb6-4732-be14-9d20dfd27336","Type":"ContainerDied","Data":"352b16fb06d1d494c344a9f71ec0cdceb5346b7312091906be3855f6d1b95bd1"} Oct 03 13:53:50 crc kubenswrapper[4861]: I1003 13:53:50.372451 4861 scope.go:117] "RemoveContainer" containerID="352b16fb06d1d494c344a9f71ec0cdceb5346b7312091906be3855f6d1b95bd1" Oct 03 13:53:50 crc kubenswrapper[4861]: I1003 13:53:50.374431 4861 generic.go:334] "Generic (PLEG): container finished" podID="344b366c-9eb6-4732-be14-9d20dfd27336" containerID="352b16fb06d1d494c344a9f71ec0cdceb5346b7312091906be3855f6d1b95bd1" exitCode=137 Oct 03 13:53:50 crc kubenswrapper[4861]: I1003 13:53:50.374554 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"344b366c-9eb6-4732-be14-9d20dfd27336","Type":"ContainerDied","Data":"0ffb80b26a81083af149d21cfdc4057c1fed679b369255bc56535745dbe5ce45"} Oct 03 13:53:50 crc kubenswrapper[4861]: I1003 13:53:50.412052 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 03 13:53:50 crc kubenswrapper[4861]: I1003 13:53:50.421703 4861 scope.go:117] "RemoveContainer" containerID="352b16fb06d1d494c344a9f71ec0cdceb5346b7312091906be3855f6d1b95bd1" Oct 03 13:53:50 crc kubenswrapper[4861]: I1003 13:53:50.422053 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 03 13:53:50 crc kubenswrapper[4861]: E1003 13:53:50.422361 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"352b16fb06d1d494c344a9f71ec0cdceb5346b7312091906be3855f6d1b95bd1\": container with ID starting with 352b16fb06d1d494c344a9f71ec0cdceb5346b7312091906be3855f6d1b95bd1 not found: ID does not exist" containerID="352b16fb06d1d494c344a9f71ec0cdceb5346b7312091906be3855f6d1b95bd1" Oct 03 13:53:50 crc kubenswrapper[4861]: 
I1003 13:53:50.422412 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"352b16fb06d1d494c344a9f71ec0cdceb5346b7312091906be3855f6d1b95bd1"} err="failed to get container status \"352b16fb06d1d494c344a9f71ec0cdceb5346b7312091906be3855f6d1b95bd1\": rpc error: code = NotFound desc = could not find container \"352b16fb06d1d494c344a9f71ec0cdceb5346b7312091906be3855f6d1b95bd1\": container with ID starting with 352b16fb06d1d494c344a9f71ec0cdceb5346b7312091906be3855f6d1b95bd1 not found: ID does not exist" Oct 03 13:53:50 crc kubenswrapper[4861]: I1003 13:53:50.440417 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 03 13:53:50 crc kubenswrapper[4861]: E1003 13:53:50.441164 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="344b366c-9eb6-4732-be14-9d20dfd27336" containerName="nova-cell1-novncproxy-novncproxy" Oct 03 13:53:50 crc kubenswrapper[4861]: I1003 13:53:50.441194 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="344b366c-9eb6-4732-be14-9d20dfd27336" containerName="nova-cell1-novncproxy-novncproxy" Oct 03 13:53:50 crc kubenswrapper[4861]: I1003 13:53:50.441518 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="344b366c-9eb6-4732-be14-9d20dfd27336" containerName="nova-cell1-novncproxy-novncproxy" Oct 03 13:53:50 crc kubenswrapper[4861]: I1003 13:53:50.442597 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 03 13:53:50 crc kubenswrapper[4861]: I1003 13:53:50.444857 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Oct 03 13:53:50 crc kubenswrapper[4861]: I1003 13:53:50.445214 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-public-svc" Oct 03 13:53:50 crc kubenswrapper[4861]: I1003 13:53:50.446978 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-vencrypt" Oct 03 13:53:50 crc kubenswrapper[4861]: I1003 13:53:50.473652 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 03 13:53:50 crc kubenswrapper[4861]: I1003 13:53:50.498412 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9b155fef-8eef-48f9-a6fe-b76d46ddadb0-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"9b155fef-8eef-48f9-a6fe-b76d46ddadb0\") " pod="openstack/nova-cell1-novncproxy-0" Oct 03 13:53:50 crc kubenswrapper[4861]: I1003 13:53:50.498453 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v8qmh\" (UniqueName: \"kubernetes.io/projected/9b155fef-8eef-48f9-a6fe-b76d46ddadb0-kube-api-access-v8qmh\") pod \"nova-cell1-novncproxy-0\" (UID: \"9b155fef-8eef-48f9-a6fe-b76d46ddadb0\") " pod="openstack/nova-cell1-novncproxy-0" Oct 03 13:53:50 crc kubenswrapper[4861]: I1003 13:53:50.498551 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/9b155fef-8eef-48f9-a6fe-b76d46ddadb0-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"9b155fef-8eef-48f9-a6fe-b76d46ddadb0\") " pod="openstack/nova-cell1-novncproxy-0" Oct 03 13:53:50 crc kubenswrapper[4861]: I1003 13:53:50.498572 4861 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9b155fef-8eef-48f9-a6fe-b76d46ddadb0-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"9b155fef-8eef-48f9-a6fe-b76d46ddadb0\") " pod="openstack/nova-cell1-novncproxy-0" Oct 03 13:53:50 crc kubenswrapper[4861]: I1003 13:53:50.498589 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/9b155fef-8eef-48f9-a6fe-b76d46ddadb0-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"9b155fef-8eef-48f9-a6fe-b76d46ddadb0\") " pod="openstack/nova-cell1-novncproxy-0" Oct 03 13:53:50 crc kubenswrapper[4861]: I1003 13:53:50.600265 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/9b155fef-8eef-48f9-a6fe-b76d46ddadb0-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"9b155fef-8eef-48f9-a6fe-b76d46ddadb0\") " pod="openstack/nova-cell1-novncproxy-0" Oct 03 13:53:50 crc kubenswrapper[4861]: I1003 13:53:50.600339 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9b155fef-8eef-48f9-a6fe-b76d46ddadb0-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"9b155fef-8eef-48f9-a6fe-b76d46ddadb0\") " pod="openstack/nova-cell1-novncproxy-0" Oct 03 13:53:50 crc kubenswrapper[4861]: I1003 13:53:50.600376 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/9b155fef-8eef-48f9-a6fe-b76d46ddadb0-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"9b155fef-8eef-48f9-a6fe-b76d46ddadb0\") " pod="openstack/nova-cell1-novncproxy-0" Oct 03 13:53:50 crc kubenswrapper[4861]: I1003 13:53:50.600473 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v8qmh\" (UniqueName: \"kubernetes.io/projected/9b155fef-8eef-48f9-a6fe-b76d46ddadb0-kube-api-access-v8qmh\") pod \"nova-cell1-novncproxy-0\" (UID: \"9b155fef-8eef-48f9-a6fe-b76d46ddadb0\") " pod="openstack/nova-cell1-novncproxy-0" Oct 03 13:53:50 crc kubenswrapper[4861]: I1003 13:53:50.600496 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9b155fef-8eef-48f9-a6fe-b76d46ddadb0-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"9b155fef-8eef-48f9-a6fe-b76d46ddadb0\") " pod="openstack/nova-cell1-novncproxy-0" Oct 03 13:53:50 crc kubenswrapper[4861]: I1003 13:53:50.604678 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/9b155fef-8eef-48f9-a6fe-b76d46ddadb0-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"9b155fef-8eef-48f9-a6fe-b76d46ddadb0\") " pod="openstack/nova-cell1-novncproxy-0" Oct 03 13:53:50 crc kubenswrapper[4861]: I1003 13:53:50.605533 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9b155fef-8eef-48f9-a6fe-b76d46ddadb0-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"9b155fef-8eef-48f9-a6fe-b76d46ddadb0\") " pod="openstack/nova-cell1-novncproxy-0" Oct 03 13:53:50 crc kubenswrapper[4861]: I1003 13:53:50.606165 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/9b155fef-8eef-48f9-a6fe-b76d46ddadb0-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"9b155fef-8eef-48f9-a6fe-b76d46ddadb0\") " pod="openstack/nova-cell1-novncproxy-0" Oct 03 13:53:50 crc kubenswrapper[4861]: I1003 13:53:50.608913 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9b155fef-8eef-48f9-a6fe-b76d46ddadb0-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"9b155fef-8eef-48f9-a6fe-b76d46ddadb0\") " pod="openstack/nova-cell1-novncproxy-0" Oct 03 13:53:50 crc kubenswrapper[4861]: I1003 13:53:50.624498 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v8qmh\" (UniqueName: \"kubernetes.io/projected/9b155fef-8eef-48f9-a6fe-b76d46ddadb0-kube-api-access-v8qmh\") pod \"nova-cell1-novncproxy-0\" (UID: \"9b155fef-8eef-48f9-a6fe-b76d46ddadb0\") " pod="openstack/nova-cell1-novncproxy-0" Oct 03 13:53:50 crc kubenswrapper[4861]: I1003 13:53:50.695014 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="344b366c-9eb6-4732-be14-9d20dfd27336" path="/var/lib/kubelet/pods/344b366c-9eb6-4732-be14-9d20dfd27336/volumes" Oct 03 13:53:50 crc kubenswrapper[4861]: I1003 13:53:50.762180 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 03 13:53:51 crc kubenswrapper[4861]: I1003 13:53:51.212487 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 03 13:53:51 crc kubenswrapper[4861]: W1003 13:53:51.225746 4861 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9b155fef_8eef_48f9_a6fe_b76d46ddadb0.slice/crio-21edc922206407417f4a6a83060468ebec536f26060e40431f67f67b5da8df49 WatchSource:0}: Error finding container 21edc922206407417f4a6a83060468ebec536f26060e40431f67f67b5da8df49: Status 404 returned error can't find the container with id 21edc922206407417f4a6a83060468ebec536f26060e40431f67f67b5da8df49 Oct 03 13:53:51 crc kubenswrapper[4861]: I1003 13:53:51.384373 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"9b155fef-8eef-48f9-a6fe-b76d46ddadb0","Type":"ContainerStarted","Data":"21edc922206407417f4a6a83060468ebec536f26060e40431f67f67b5da8df49"} Oct 03 13:53:51 crc kubenswrapper[4861]: I1003 13:53:51.641029 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Oct 03 13:53:51 crc kubenswrapper[4861]: I1003 13:53:51.641101 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Oct 03 13:53:51 crc kubenswrapper[4861]: I1003 13:53:51.641629 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Oct 03 13:53:51 crc kubenswrapper[4861]: I1003 13:53:51.641652 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Oct 03 13:53:51 crc kubenswrapper[4861]: I1003 13:53:51.645369 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Oct 03 13:53:51 crc kubenswrapper[4861]: I1003 13:53:51.646617 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Oct 03 13:53:51 crc kubenswrapper[4861]: I1003 13:53:51.919717 4861 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack/dnsmasq-dns-59cf4bdb65-d4jfw"] Oct 03 13:53:51 crc kubenswrapper[4861]: I1003 13:53:51.921868 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-59cf4bdb65-d4jfw" Oct 03 13:53:51 crc kubenswrapper[4861]: I1003 13:53:51.960558 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-59cf4bdb65-d4jfw"] Oct 03 13:53:52 crc kubenswrapper[4861]: I1003 13:53:52.027201 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g5zhl\" (UniqueName: \"kubernetes.io/projected/2c9a4578-b9f0-4eba-a774-580a69276809-kube-api-access-g5zhl\") pod \"dnsmasq-dns-59cf4bdb65-d4jfw\" (UID: \"2c9a4578-b9f0-4eba-a774-580a69276809\") " pod="openstack/dnsmasq-dns-59cf4bdb65-d4jfw" Oct 03 13:53:52 crc kubenswrapper[4861]: I1003 13:53:52.027270 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2c9a4578-b9f0-4eba-a774-580a69276809-dns-swift-storage-0\") pod \"dnsmasq-dns-59cf4bdb65-d4jfw\" (UID: \"2c9a4578-b9f0-4eba-a774-580a69276809\") " pod="openstack/dnsmasq-dns-59cf4bdb65-d4jfw" Oct 03 13:53:52 crc kubenswrapper[4861]: I1003 13:53:52.027293 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2c9a4578-b9f0-4eba-a774-580a69276809-ovsdbserver-sb\") pod \"dnsmasq-dns-59cf4bdb65-d4jfw\" (UID: \"2c9a4578-b9f0-4eba-a774-580a69276809\") " pod="openstack/dnsmasq-dns-59cf4bdb65-d4jfw" Oct 03 13:53:52 crc kubenswrapper[4861]: I1003 13:53:52.027354 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2c9a4578-b9f0-4eba-a774-580a69276809-config\") pod \"dnsmasq-dns-59cf4bdb65-d4jfw\" (UID: \"2c9a4578-b9f0-4eba-a774-580a69276809\") " pod="openstack/dnsmasq-dns-59cf4bdb65-d4jfw" Oct 03 13:53:52 crc kubenswrapper[4861]: I1003 13:53:52.027420 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2c9a4578-b9f0-4eba-a774-580a69276809-ovsdbserver-nb\") pod \"dnsmasq-dns-59cf4bdb65-d4jfw\" (UID: \"2c9a4578-b9f0-4eba-a774-580a69276809\") " pod="openstack/dnsmasq-dns-59cf4bdb65-d4jfw" Oct 03 13:53:52 crc kubenswrapper[4861]: I1003 13:53:52.027473 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2c9a4578-b9f0-4eba-a774-580a69276809-dns-svc\") pod \"dnsmasq-dns-59cf4bdb65-d4jfw\" (UID: \"2c9a4578-b9f0-4eba-a774-580a69276809\") " pod="openstack/dnsmasq-dns-59cf4bdb65-d4jfw" Oct 03 13:53:52 crc kubenswrapper[4861]: I1003 13:53:52.131317 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2c9a4578-b9f0-4eba-a774-580a69276809-ovsdbserver-nb\") pod \"dnsmasq-dns-59cf4bdb65-d4jfw\" (UID: \"2c9a4578-b9f0-4eba-a774-580a69276809\") " pod="openstack/dnsmasq-dns-59cf4bdb65-d4jfw" Oct 03 13:53:52 crc kubenswrapper[4861]: I1003 13:53:52.131418 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2c9a4578-b9f0-4eba-a774-580a69276809-dns-svc\") pod \"dnsmasq-dns-59cf4bdb65-d4jfw\" (UID: \"2c9a4578-b9f0-4eba-a774-580a69276809\") 
" pod="openstack/dnsmasq-dns-59cf4bdb65-d4jfw" Oct 03 13:53:52 crc kubenswrapper[4861]: I1003 13:53:52.131500 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g5zhl\" (UniqueName: \"kubernetes.io/projected/2c9a4578-b9f0-4eba-a774-580a69276809-kube-api-access-g5zhl\") pod \"dnsmasq-dns-59cf4bdb65-d4jfw\" (UID: \"2c9a4578-b9f0-4eba-a774-580a69276809\") " pod="openstack/dnsmasq-dns-59cf4bdb65-d4jfw" Oct 03 13:53:52 crc kubenswrapper[4861]: I1003 13:53:52.131536 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2c9a4578-b9f0-4eba-a774-580a69276809-dns-swift-storage-0\") pod \"dnsmasq-dns-59cf4bdb65-d4jfw\" (UID: \"2c9a4578-b9f0-4eba-a774-580a69276809\") " pod="openstack/dnsmasq-dns-59cf4bdb65-d4jfw" Oct 03 13:53:52 crc kubenswrapper[4861]: I1003 13:53:52.131566 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2c9a4578-b9f0-4eba-a774-580a69276809-ovsdbserver-sb\") pod \"dnsmasq-dns-59cf4bdb65-d4jfw\" (UID: \"2c9a4578-b9f0-4eba-a774-580a69276809\") " pod="openstack/dnsmasq-dns-59cf4bdb65-d4jfw" Oct 03 13:53:52 crc kubenswrapper[4861]: I1003 13:53:52.131645 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2c9a4578-b9f0-4eba-a774-580a69276809-config\") pod \"dnsmasq-dns-59cf4bdb65-d4jfw\" (UID: \"2c9a4578-b9f0-4eba-a774-580a69276809\") " pod="openstack/dnsmasq-dns-59cf4bdb65-d4jfw" Oct 03 13:53:52 crc kubenswrapper[4861]: I1003 13:53:52.132761 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2c9a4578-b9f0-4eba-a774-580a69276809-config\") pod \"dnsmasq-dns-59cf4bdb65-d4jfw\" (UID: \"2c9a4578-b9f0-4eba-a774-580a69276809\") " pod="openstack/dnsmasq-dns-59cf4bdb65-d4jfw" Oct 03 13:53:52 crc kubenswrapper[4861]: I1003 13:53:52.136876 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2c9a4578-b9f0-4eba-a774-580a69276809-dns-swift-storage-0\") pod \"dnsmasq-dns-59cf4bdb65-d4jfw\" (UID: \"2c9a4578-b9f0-4eba-a774-580a69276809\") " pod="openstack/dnsmasq-dns-59cf4bdb65-d4jfw" Oct 03 13:53:52 crc kubenswrapper[4861]: I1003 13:53:52.136953 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2c9a4578-b9f0-4eba-a774-580a69276809-dns-svc\") pod \"dnsmasq-dns-59cf4bdb65-d4jfw\" (UID: \"2c9a4578-b9f0-4eba-a774-580a69276809\") " pod="openstack/dnsmasq-dns-59cf4bdb65-d4jfw" Oct 03 13:53:52 crc kubenswrapper[4861]: I1003 13:53:52.137020 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2c9a4578-b9f0-4eba-a774-580a69276809-ovsdbserver-nb\") pod \"dnsmasq-dns-59cf4bdb65-d4jfw\" (UID: \"2c9a4578-b9f0-4eba-a774-580a69276809\") " pod="openstack/dnsmasq-dns-59cf4bdb65-d4jfw" Oct 03 13:53:52 crc kubenswrapper[4861]: I1003 13:53:52.137425 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2c9a4578-b9f0-4eba-a774-580a69276809-ovsdbserver-sb\") pod \"dnsmasq-dns-59cf4bdb65-d4jfw\" (UID: \"2c9a4578-b9f0-4eba-a774-580a69276809\") " pod="openstack/dnsmasq-dns-59cf4bdb65-d4jfw" Oct 03 13:53:52 crc kubenswrapper[4861]: I1003 
13:53:52.192287 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g5zhl\" (UniqueName: \"kubernetes.io/projected/2c9a4578-b9f0-4eba-a774-580a69276809-kube-api-access-g5zhl\") pod \"dnsmasq-dns-59cf4bdb65-d4jfw\" (UID: \"2c9a4578-b9f0-4eba-a774-580a69276809\") " pod="openstack/dnsmasq-dns-59cf4bdb65-d4jfw" Oct 03 13:53:52 crc kubenswrapper[4861]: I1003 13:53:52.324260 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-59cf4bdb65-d4jfw" Oct 03 13:53:52 crc kubenswrapper[4861]: I1003 13:53:52.403981 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"9b155fef-8eef-48f9-a6fe-b76d46ddadb0","Type":"ContainerStarted","Data":"a88c3102e17399ba3264b0e7a508f659661e1eb9839d3758d2d6048b23cf517b"} Oct 03 13:53:52 crc kubenswrapper[4861]: I1003 13:53:52.430090 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.430064497 podStartE2EDuration="2.430064497s" podCreationTimestamp="2025-10-03 13:53:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:53:52.421862389 +0000 UTC m=+1346.419847436" watchObservedRunningTime="2025-10-03 13:53:52.430064497 +0000 UTC m=+1346.428049544" Oct 03 13:53:52 crc kubenswrapper[4861]: I1003 13:53:52.852877 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-59cf4bdb65-d4jfw"] Oct 03 13:53:53 crc kubenswrapper[4861]: I1003 13:53:53.411824 4861 generic.go:334] "Generic (PLEG): container finished" podID="2c9a4578-b9f0-4eba-a774-580a69276809" containerID="4c189bd844513e6bacf4e4b38f037a49fbb05e130e1e2b1b9eb2a08793d2645a" exitCode=0 Oct 03 13:53:53 crc kubenswrapper[4861]: I1003 13:53:53.411996 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59cf4bdb65-d4jfw" event={"ID":"2c9a4578-b9f0-4eba-a774-580a69276809","Type":"ContainerDied","Data":"4c189bd844513e6bacf4e4b38f037a49fbb05e130e1e2b1b9eb2a08793d2645a"} Oct 03 13:53:53 crc kubenswrapper[4861]: I1003 13:53:53.412154 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59cf4bdb65-d4jfw" event={"ID":"2c9a4578-b9f0-4eba-a774-580a69276809","Type":"ContainerStarted","Data":"0cdb23e1d25c734e0bb8ed8b99504e9dce28da5b0bae88380933b263dff703d3"} Oct 03 13:53:54 crc kubenswrapper[4861]: I1003 13:53:54.456437 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59cf4bdb65-d4jfw" event={"ID":"2c9a4578-b9f0-4eba-a774-580a69276809","Type":"ContainerStarted","Data":"e7ce84b2ad4affdb28e3ce0b262409f7e3dfa027dbb6ca09de7efcfa4c0982a9"} Oct 03 13:53:54 crc kubenswrapper[4861]: I1003 13:53:54.458858 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-59cf4bdb65-d4jfw" Oct 03 13:53:54 crc kubenswrapper[4861]: I1003 13:53:54.471920 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 03 13:53:54 crc kubenswrapper[4861]: I1003 13:53:54.472720 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="2d8414db-04a2-4f58-a99f-73d7fd445d5d" containerName="proxy-httpd" containerID="cri-o://49b9fa23272a671e8c4703d4b40f35e6df65a441caa953043f3d8699b2a5ae39" gracePeriod=30 Oct 03 13:53:54 crc kubenswrapper[4861]: I1003 13:53:54.472738 4861 kuberuntime_container.go:808] "Killing container 
with a grace period" pod="openstack/ceilometer-0" podUID="2d8414db-04a2-4f58-a99f-73d7fd445d5d" containerName="ceilometer-central-agent" containerID="cri-o://6c6e2618aefcb90000bd7da57f3c663d9967d233c22ca7d05a0ecc361b025823" gracePeriod=30 Oct 03 13:53:54 crc kubenswrapper[4861]: I1003 13:53:54.472892 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="2d8414db-04a2-4f58-a99f-73d7fd445d5d" containerName="sg-core" containerID="cri-o://c7e090cfe01cc2ed04c1284306e47dc6e04e7f1382d99cb6eac04cdfa6255faf" gracePeriod=30 Oct 03 13:53:54 crc kubenswrapper[4861]: I1003 13:53:54.472959 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="2d8414db-04a2-4f58-a99f-73d7fd445d5d" containerName="ceilometer-notification-agent" containerID="cri-o://aa38e6bbdb33ec4d49e4e193b938c62b2387241609e7cb127c8b8125bf0fc584" gracePeriod=30 Oct 03 13:53:54 crc kubenswrapper[4861]: I1003 13:53:54.492212 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-59cf4bdb65-d4jfw" podStartSLOduration=3.4921945 podStartE2EDuration="3.4921945s" podCreationTimestamp="2025-10-03 13:53:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:53:54.483867059 +0000 UTC m=+1348.481852106" watchObservedRunningTime="2025-10-03 13:53:54.4921945 +0000 UTC m=+1348.490179547" Oct 03 13:53:54 crc kubenswrapper[4861]: I1003 13:53:54.585903 4861 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="2d8414db-04a2-4f58-a99f-73d7fd445d5d" containerName="proxy-httpd" probeResult="failure" output="Get \"https://10.217.0.195:3000/\": read tcp 10.217.0.2:41550->10.217.0.195:3000: read: connection reset by peer" Oct 03 13:53:54 crc kubenswrapper[4861]: I1003 13:53:54.624310 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 03 13:53:54 crc kubenswrapper[4861]: I1003 13:53:54.624544 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="fd1efd4a-2b99-445d-b2af-4f129db94e2c" containerName="nova-api-log" containerID="cri-o://0c7f51cb05fba6f4c3a58ad9231351ac250b63d4b77917f495fd5b86870dda99" gracePeriod=30 Oct 03 13:53:54 crc kubenswrapper[4861]: I1003 13:53:54.624656 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="fd1efd4a-2b99-445d-b2af-4f129db94e2c" containerName="nova-api-api" containerID="cri-o://e1033a5fff1669f552d4cb281b5a5de1171e402d8113b0569668ff3243b6054c" gracePeriod=30 Oct 03 13:53:55 crc kubenswrapper[4861]: I1003 13:53:55.466967 4861 generic.go:334] "Generic (PLEG): container finished" podID="2d8414db-04a2-4f58-a99f-73d7fd445d5d" containerID="49b9fa23272a671e8c4703d4b40f35e6df65a441caa953043f3d8699b2a5ae39" exitCode=0 Oct 03 13:53:55 crc kubenswrapper[4861]: I1003 13:53:55.466995 4861 generic.go:334] "Generic (PLEG): container finished" podID="2d8414db-04a2-4f58-a99f-73d7fd445d5d" containerID="c7e090cfe01cc2ed04c1284306e47dc6e04e7f1382d99cb6eac04cdfa6255faf" exitCode=2 Oct 03 13:53:55 crc kubenswrapper[4861]: I1003 13:53:55.467004 4861 generic.go:334] "Generic (PLEG): container finished" podID="2d8414db-04a2-4f58-a99f-73d7fd445d5d" containerID="6c6e2618aefcb90000bd7da57f3c663d9967d233c22ca7d05a0ecc361b025823" exitCode=0 Oct 03 13:53:55 crc kubenswrapper[4861]: I1003 13:53:55.467046 4861 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2d8414db-04a2-4f58-a99f-73d7fd445d5d","Type":"ContainerDied","Data":"49b9fa23272a671e8c4703d4b40f35e6df65a441caa953043f3d8699b2a5ae39"} Oct 03 13:53:55 crc kubenswrapper[4861]: I1003 13:53:55.467072 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2d8414db-04a2-4f58-a99f-73d7fd445d5d","Type":"ContainerDied","Data":"c7e090cfe01cc2ed04c1284306e47dc6e04e7f1382d99cb6eac04cdfa6255faf"} Oct 03 13:53:55 crc kubenswrapper[4861]: I1003 13:53:55.467082 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2d8414db-04a2-4f58-a99f-73d7fd445d5d","Type":"ContainerDied","Data":"6c6e2618aefcb90000bd7da57f3c663d9967d233c22ca7d05a0ecc361b025823"} Oct 03 13:53:55 crc kubenswrapper[4861]: I1003 13:53:55.469003 4861 generic.go:334] "Generic (PLEG): container finished" podID="fd1efd4a-2b99-445d-b2af-4f129db94e2c" containerID="0c7f51cb05fba6f4c3a58ad9231351ac250b63d4b77917f495fd5b86870dda99" exitCode=143 Oct 03 13:53:55 crc kubenswrapper[4861]: I1003 13:53:55.469069 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"fd1efd4a-2b99-445d-b2af-4f129db94e2c","Type":"ContainerDied","Data":"0c7f51cb05fba6f4c3a58ad9231351ac250b63d4b77917f495fd5b86870dda99"} Oct 03 13:53:55 crc kubenswrapper[4861]: I1003 13:53:55.762958 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Oct 03 13:53:58 crc kubenswrapper[4861]: I1003 13:53:58.304953 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 03 13:53:58 crc kubenswrapper[4861]: I1003 13:53:58.313560 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 03 13:53:58 crc kubenswrapper[4861]: I1003 13:53:58.452984 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2d8414db-04a2-4f58-a99f-73d7fd445d5d-scripts\") pod \"2d8414db-04a2-4f58-a99f-73d7fd445d5d\" (UID: \"2d8414db-04a2-4f58-a99f-73d7fd445d5d\") " Oct 03 13:53:58 crc kubenswrapper[4861]: I1003 13:53:58.453070 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jn65r\" (UniqueName: \"kubernetes.io/projected/2d8414db-04a2-4f58-a99f-73d7fd445d5d-kube-api-access-jn65r\") pod \"2d8414db-04a2-4f58-a99f-73d7fd445d5d\" (UID: \"2d8414db-04a2-4f58-a99f-73d7fd445d5d\") " Oct 03 13:53:58 crc kubenswrapper[4861]: I1003 13:53:58.453108 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fd1efd4a-2b99-445d-b2af-4f129db94e2c-config-data\") pod \"fd1efd4a-2b99-445d-b2af-4f129db94e2c\" (UID: \"fd1efd4a-2b99-445d-b2af-4f129db94e2c\") " Oct 03 13:53:58 crc kubenswrapper[4861]: I1003 13:53:58.453136 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2d8414db-04a2-4f58-a99f-73d7fd445d5d-log-httpd\") pod \"2d8414db-04a2-4f58-a99f-73d7fd445d5d\" (UID: \"2d8414db-04a2-4f58-a99f-73d7fd445d5d\") " Oct 03 13:53:58 crc kubenswrapper[4861]: I1003 13:53:58.453178 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4lsgp\" (UniqueName: \"kubernetes.io/projected/fd1efd4a-2b99-445d-b2af-4f129db94e2c-kube-api-access-4lsgp\") pod \"fd1efd4a-2b99-445d-b2af-4f129db94e2c\" (UID: \"fd1efd4a-2b99-445d-b2af-4f129db94e2c\") " Oct 03 13:53:58 crc kubenswrapper[4861]: I1003 13:53:58.453205 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d8414db-04a2-4f58-a99f-73d7fd445d5d-combined-ca-bundle\") pod \"2d8414db-04a2-4f58-a99f-73d7fd445d5d\" (UID: \"2d8414db-04a2-4f58-a99f-73d7fd445d5d\") " Oct 03 13:53:58 crc kubenswrapper[4861]: I1003 13:53:58.453330 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2d8414db-04a2-4f58-a99f-73d7fd445d5d-config-data\") pod \"2d8414db-04a2-4f58-a99f-73d7fd445d5d\" (UID: \"2d8414db-04a2-4f58-a99f-73d7fd445d5d\") " Oct 03 13:53:58 crc kubenswrapper[4861]: I1003 13:53:58.453369 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd1efd4a-2b99-445d-b2af-4f129db94e2c-combined-ca-bundle\") pod \"fd1efd4a-2b99-445d-b2af-4f129db94e2c\" (UID: \"fd1efd4a-2b99-445d-b2af-4f129db94e2c\") " Oct 03 13:53:58 crc kubenswrapper[4861]: I1003 13:53:58.453397 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fd1efd4a-2b99-445d-b2af-4f129db94e2c-logs\") pod \"fd1efd4a-2b99-445d-b2af-4f129db94e2c\" (UID: \"fd1efd4a-2b99-445d-b2af-4f129db94e2c\") " Oct 03 13:53:58 crc kubenswrapper[4861]: I1003 13:53:58.453431 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2d8414db-04a2-4f58-a99f-73d7fd445d5d-run-httpd\") pod \"2d8414db-04a2-4f58-a99f-73d7fd445d5d\" (UID: \"2d8414db-04a2-4f58-a99f-73d7fd445d5d\") " 
Oct 03 13:53:58 crc kubenswrapper[4861]: I1003 13:53:58.453491 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/2d8414db-04a2-4f58-a99f-73d7fd445d5d-ceilometer-tls-certs\") pod \"2d8414db-04a2-4f58-a99f-73d7fd445d5d\" (UID: \"2d8414db-04a2-4f58-a99f-73d7fd445d5d\") " Oct 03 13:53:58 crc kubenswrapper[4861]: I1003 13:53:58.453515 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2d8414db-04a2-4f58-a99f-73d7fd445d5d-sg-core-conf-yaml\") pod \"2d8414db-04a2-4f58-a99f-73d7fd445d5d\" (UID: \"2d8414db-04a2-4f58-a99f-73d7fd445d5d\") " Oct 03 13:53:58 crc kubenswrapper[4861]: I1003 13:53:58.454060 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fd1efd4a-2b99-445d-b2af-4f129db94e2c-logs" (OuterVolumeSpecName: "logs") pod "fd1efd4a-2b99-445d-b2af-4f129db94e2c" (UID: "fd1efd4a-2b99-445d-b2af-4f129db94e2c"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:53:58 crc kubenswrapper[4861]: I1003 13:53:58.454288 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2d8414db-04a2-4f58-a99f-73d7fd445d5d-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "2d8414db-04a2-4f58-a99f-73d7fd445d5d" (UID: "2d8414db-04a2-4f58-a99f-73d7fd445d5d"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:53:58 crc kubenswrapper[4861]: I1003 13:53:58.455288 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2d8414db-04a2-4f58-a99f-73d7fd445d5d-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "2d8414db-04a2-4f58-a99f-73d7fd445d5d" (UID: "2d8414db-04a2-4f58-a99f-73d7fd445d5d"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:53:58 crc kubenswrapper[4861]: I1003 13:53:58.461625 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2d8414db-04a2-4f58-a99f-73d7fd445d5d-kube-api-access-jn65r" (OuterVolumeSpecName: "kube-api-access-jn65r") pod "2d8414db-04a2-4f58-a99f-73d7fd445d5d" (UID: "2d8414db-04a2-4f58-a99f-73d7fd445d5d"). InnerVolumeSpecName "kube-api-access-jn65r". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:53:58 crc kubenswrapper[4861]: I1003 13:53:58.465275 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fd1efd4a-2b99-445d-b2af-4f129db94e2c-kube-api-access-4lsgp" (OuterVolumeSpecName: "kube-api-access-4lsgp") pod "fd1efd4a-2b99-445d-b2af-4f129db94e2c" (UID: "fd1efd4a-2b99-445d-b2af-4f129db94e2c"). InnerVolumeSpecName "kube-api-access-4lsgp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:53:58 crc kubenswrapper[4861]: I1003 13:53:58.487337 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2d8414db-04a2-4f58-a99f-73d7fd445d5d-scripts" (OuterVolumeSpecName: "scripts") pod "2d8414db-04a2-4f58-a99f-73d7fd445d5d" (UID: "2d8414db-04a2-4f58-a99f-73d7fd445d5d"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:53:58 crc kubenswrapper[4861]: I1003 13:53:58.522299 4861 generic.go:334] "Generic (PLEG): container finished" podID="2d8414db-04a2-4f58-a99f-73d7fd445d5d" containerID="aa38e6bbdb33ec4d49e4e193b938c62b2387241609e7cb127c8b8125bf0fc584" exitCode=0 Oct 03 13:53:58 crc kubenswrapper[4861]: I1003 13:53:58.522372 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2d8414db-04a2-4f58-a99f-73d7fd445d5d","Type":"ContainerDied","Data":"aa38e6bbdb33ec4d49e4e193b938c62b2387241609e7cb127c8b8125bf0fc584"} Oct 03 13:53:58 crc kubenswrapper[4861]: I1003 13:53:58.522403 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2d8414db-04a2-4f58-a99f-73d7fd445d5d","Type":"ContainerDied","Data":"96539ce71b141c97ab7b340b04324f6f2f9871a73033d0f9de08484f40359862"} Oct 03 13:53:58 crc kubenswrapper[4861]: I1003 13:53:58.522421 4861 scope.go:117] "RemoveContainer" containerID="49b9fa23272a671e8c4703d4b40f35e6df65a441caa953043f3d8699b2a5ae39" Oct 03 13:53:58 crc kubenswrapper[4861]: I1003 13:53:58.522585 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 03 13:53:58 crc kubenswrapper[4861]: I1003 13:53:58.531144 4861 generic.go:334] "Generic (PLEG): container finished" podID="fd1efd4a-2b99-445d-b2af-4f129db94e2c" containerID="e1033a5fff1669f552d4cb281b5a5de1171e402d8113b0569668ff3243b6054c" exitCode=0 Oct 03 13:53:58 crc kubenswrapper[4861]: I1003 13:53:58.531183 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"fd1efd4a-2b99-445d-b2af-4f129db94e2c","Type":"ContainerDied","Data":"e1033a5fff1669f552d4cb281b5a5de1171e402d8113b0569668ff3243b6054c"} Oct 03 13:53:58 crc kubenswrapper[4861]: I1003 13:53:58.531209 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"fd1efd4a-2b99-445d-b2af-4f129db94e2c","Type":"ContainerDied","Data":"5afb97f90dd81a3a504b324a7bd76f33b871125b5d35582dcbc36ae0f1d7da15"} Oct 03 13:53:58 crc kubenswrapper[4861]: I1003 13:53:58.531293 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 03 13:53:58 crc kubenswrapper[4861]: I1003 13:53:58.538443 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fd1efd4a-2b99-445d-b2af-4f129db94e2c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fd1efd4a-2b99-445d-b2af-4f129db94e2c" (UID: "fd1efd4a-2b99-445d-b2af-4f129db94e2c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:53:58 crc kubenswrapper[4861]: I1003 13:53:58.547827 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fd1efd4a-2b99-445d-b2af-4f129db94e2c-config-data" (OuterVolumeSpecName: "config-data") pod "fd1efd4a-2b99-445d-b2af-4f129db94e2c" (UID: "fd1efd4a-2b99-445d-b2af-4f129db94e2c"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:53:58 crc kubenswrapper[4861]: I1003 13:53:58.581693 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4lsgp\" (UniqueName: \"kubernetes.io/projected/fd1efd4a-2b99-445d-b2af-4f129db94e2c-kube-api-access-4lsgp\") on node \"crc\" DevicePath \"\"" Oct 03 13:53:58 crc kubenswrapper[4861]: I1003 13:53:58.581733 4861 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd1efd4a-2b99-445d-b2af-4f129db94e2c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 13:53:58 crc kubenswrapper[4861]: I1003 13:53:58.581744 4861 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fd1efd4a-2b99-445d-b2af-4f129db94e2c-logs\") on node \"crc\" DevicePath \"\"" Oct 03 13:53:58 crc kubenswrapper[4861]: I1003 13:53:58.581753 4861 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2d8414db-04a2-4f58-a99f-73d7fd445d5d-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 03 13:53:58 crc kubenswrapper[4861]: I1003 13:53:58.581762 4861 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2d8414db-04a2-4f58-a99f-73d7fd445d5d-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 13:53:58 crc kubenswrapper[4861]: I1003 13:53:58.581774 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jn65r\" (UniqueName: \"kubernetes.io/projected/2d8414db-04a2-4f58-a99f-73d7fd445d5d-kube-api-access-jn65r\") on node \"crc\" DevicePath \"\"" Oct 03 13:53:58 crc kubenswrapper[4861]: I1003 13:53:58.581784 4861 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fd1efd4a-2b99-445d-b2af-4f129db94e2c-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 13:53:58 crc kubenswrapper[4861]: I1003 13:53:58.581792 4861 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2d8414db-04a2-4f58-a99f-73d7fd445d5d-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 03 13:53:58 crc kubenswrapper[4861]: I1003 13:53:58.592986 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2d8414db-04a2-4f58-a99f-73d7fd445d5d-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "2d8414db-04a2-4f58-a99f-73d7fd445d5d" (UID: "2d8414db-04a2-4f58-a99f-73d7fd445d5d"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:53:58 crc kubenswrapper[4861]: I1003 13:53:58.593866 4861 scope.go:117] "RemoveContainer" containerID="c7e090cfe01cc2ed04c1284306e47dc6e04e7f1382d99cb6eac04cdfa6255faf" Oct 03 13:53:58 crc kubenswrapper[4861]: I1003 13:53:58.617199 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2d8414db-04a2-4f58-a99f-73d7fd445d5d-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "2d8414db-04a2-4f58-a99f-73d7fd445d5d" (UID: "2d8414db-04a2-4f58-a99f-73d7fd445d5d"). InnerVolumeSpecName "ceilometer-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:53:58 crc kubenswrapper[4861]: I1003 13:53:58.625294 4861 scope.go:117] "RemoveContainer" containerID="aa38e6bbdb33ec4d49e4e193b938c62b2387241609e7cb127c8b8125bf0fc584" Oct 03 13:53:58 crc kubenswrapper[4861]: I1003 13:53:58.632181 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2d8414db-04a2-4f58-a99f-73d7fd445d5d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2d8414db-04a2-4f58-a99f-73d7fd445d5d" (UID: "2d8414db-04a2-4f58-a99f-73d7fd445d5d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:53:58 crc kubenswrapper[4861]: I1003 13:53:58.646318 4861 scope.go:117] "RemoveContainer" containerID="6c6e2618aefcb90000bd7da57f3c663d9967d233c22ca7d05a0ecc361b025823" Oct 03 13:53:58 crc kubenswrapper[4861]: I1003 13:53:58.666958 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2d8414db-04a2-4f58-a99f-73d7fd445d5d-config-data" (OuterVolumeSpecName: "config-data") pod "2d8414db-04a2-4f58-a99f-73d7fd445d5d" (UID: "2d8414db-04a2-4f58-a99f-73d7fd445d5d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:53:58 crc kubenswrapper[4861]: I1003 13:53:58.672847 4861 scope.go:117] "RemoveContainer" containerID="49b9fa23272a671e8c4703d4b40f35e6df65a441caa953043f3d8699b2a5ae39" Oct 03 13:53:58 crc kubenswrapper[4861]: E1003 13:53:58.673302 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"49b9fa23272a671e8c4703d4b40f35e6df65a441caa953043f3d8699b2a5ae39\": container with ID starting with 49b9fa23272a671e8c4703d4b40f35e6df65a441caa953043f3d8699b2a5ae39 not found: ID does not exist" containerID="49b9fa23272a671e8c4703d4b40f35e6df65a441caa953043f3d8699b2a5ae39" Oct 03 13:53:58 crc kubenswrapper[4861]: I1003 13:53:58.673352 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"49b9fa23272a671e8c4703d4b40f35e6df65a441caa953043f3d8699b2a5ae39"} err="failed to get container status \"49b9fa23272a671e8c4703d4b40f35e6df65a441caa953043f3d8699b2a5ae39\": rpc error: code = NotFound desc = could not find container \"49b9fa23272a671e8c4703d4b40f35e6df65a441caa953043f3d8699b2a5ae39\": container with ID starting with 49b9fa23272a671e8c4703d4b40f35e6df65a441caa953043f3d8699b2a5ae39 not found: ID does not exist" Oct 03 13:53:58 crc kubenswrapper[4861]: I1003 13:53:58.673385 4861 scope.go:117] "RemoveContainer" containerID="c7e090cfe01cc2ed04c1284306e47dc6e04e7f1382d99cb6eac04cdfa6255faf" Oct 03 13:53:58 crc kubenswrapper[4861]: E1003 13:53:58.674074 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c7e090cfe01cc2ed04c1284306e47dc6e04e7f1382d99cb6eac04cdfa6255faf\": container with ID starting with c7e090cfe01cc2ed04c1284306e47dc6e04e7f1382d99cb6eac04cdfa6255faf not found: ID does not exist" containerID="c7e090cfe01cc2ed04c1284306e47dc6e04e7f1382d99cb6eac04cdfa6255faf" Oct 03 13:53:58 crc kubenswrapper[4861]: I1003 13:53:58.674096 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c7e090cfe01cc2ed04c1284306e47dc6e04e7f1382d99cb6eac04cdfa6255faf"} err="failed to get container status \"c7e090cfe01cc2ed04c1284306e47dc6e04e7f1382d99cb6eac04cdfa6255faf\": rpc error: code = NotFound desc = could not find 
container \"c7e090cfe01cc2ed04c1284306e47dc6e04e7f1382d99cb6eac04cdfa6255faf\": container with ID starting with c7e090cfe01cc2ed04c1284306e47dc6e04e7f1382d99cb6eac04cdfa6255faf not found: ID does not exist" Oct 03 13:53:58 crc kubenswrapper[4861]: I1003 13:53:58.674109 4861 scope.go:117] "RemoveContainer" containerID="aa38e6bbdb33ec4d49e4e193b938c62b2387241609e7cb127c8b8125bf0fc584" Oct 03 13:53:58 crc kubenswrapper[4861]: E1003 13:53:58.674517 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aa38e6bbdb33ec4d49e4e193b938c62b2387241609e7cb127c8b8125bf0fc584\": container with ID starting with aa38e6bbdb33ec4d49e4e193b938c62b2387241609e7cb127c8b8125bf0fc584 not found: ID does not exist" containerID="aa38e6bbdb33ec4d49e4e193b938c62b2387241609e7cb127c8b8125bf0fc584" Oct 03 13:53:58 crc kubenswrapper[4861]: I1003 13:53:58.674536 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aa38e6bbdb33ec4d49e4e193b938c62b2387241609e7cb127c8b8125bf0fc584"} err="failed to get container status \"aa38e6bbdb33ec4d49e4e193b938c62b2387241609e7cb127c8b8125bf0fc584\": rpc error: code = NotFound desc = could not find container \"aa38e6bbdb33ec4d49e4e193b938c62b2387241609e7cb127c8b8125bf0fc584\": container with ID starting with aa38e6bbdb33ec4d49e4e193b938c62b2387241609e7cb127c8b8125bf0fc584 not found: ID does not exist" Oct 03 13:53:58 crc kubenswrapper[4861]: I1003 13:53:58.674547 4861 scope.go:117] "RemoveContainer" containerID="6c6e2618aefcb90000bd7da57f3c663d9967d233c22ca7d05a0ecc361b025823" Oct 03 13:53:58 crc kubenswrapper[4861]: E1003 13:53:58.675056 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6c6e2618aefcb90000bd7da57f3c663d9967d233c22ca7d05a0ecc361b025823\": container with ID starting with 6c6e2618aefcb90000bd7da57f3c663d9967d233c22ca7d05a0ecc361b025823 not found: ID does not exist" containerID="6c6e2618aefcb90000bd7da57f3c663d9967d233c22ca7d05a0ecc361b025823" Oct 03 13:53:58 crc kubenswrapper[4861]: I1003 13:53:58.675077 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6c6e2618aefcb90000bd7da57f3c663d9967d233c22ca7d05a0ecc361b025823"} err="failed to get container status \"6c6e2618aefcb90000bd7da57f3c663d9967d233c22ca7d05a0ecc361b025823\": rpc error: code = NotFound desc = could not find container \"6c6e2618aefcb90000bd7da57f3c663d9967d233c22ca7d05a0ecc361b025823\": container with ID starting with 6c6e2618aefcb90000bd7da57f3c663d9967d233c22ca7d05a0ecc361b025823 not found: ID does not exist" Oct 03 13:53:58 crc kubenswrapper[4861]: I1003 13:53:58.675089 4861 scope.go:117] "RemoveContainer" containerID="e1033a5fff1669f552d4cb281b5a5de1171e402d8113b0569668ff3243b6054c" Oct 03 13:53:58 crc kubenswrapper[4861]: I1003 13:53:58.685479 4861 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2d8414db-04a2-4f58-a99f-73d7fd445d5d-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 03 13:53:58 crc kubenswrapper[4861]: I1003 13:53:58.685517 4861 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d8414db-04a2-4f58-a99f-73d7fd445d5d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 13:53:58 crc kubenswrapper[4861]: I1003 13:53:58.685550 4861 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/2d8414db-04a2-4f58-a99f-73d7fd445d5d-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 13:53:58 crc kubenswrapper[4861]: I1003 13:53:58.685582 4861 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/2d8414db-04a2-4f58-a99f-73d7fd445d5d-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 03 13:53:58 crc kubenswrapper[4861]: I1003 13:53:58.702593 4861 scope.go:117] "RemoveContainer" containerID="0c7f51cb05fba6f4c3a58ad9231351ac250b63d4b77917f495fd5b86870dda99" Oct 03 13:53:58 crc kubenswrapper[4861]: I1003 13:53:58.735551 4861 scope.go:117] "RemoveContainer" containerID="e1033a5fff1669f552d4cb281b5a5de1171e402d8113b0569668ff3243b6054c" Oct 03 13:53:58 crc kubenswrapper[4861]: E1003 13:53:58.735955 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e1033a5fff1669f552d4cb281b5a5de1171e402d8113b0569668ff3243b6054c\": container with ID starting with e1033a5fff1669f552d4cb281b5a5de1171e402d8113b0569668ff3243b6054c not found: ID does not exist" containerID="e1033a5fff1669f552d4cb281b5a5de1171e402d8113b0569668ff3243b6054c" Oct 03 13:53:58 crc kubenswrapper[4861]: I1003 13:53:58.735983 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e1033a5fff1669f552d4cb281b5a5de1171e402d8113b0569668ff3243b6054c"} err="failed to get container status \"e1033a5fff1669f552d4cb281b5a5de1171e402d8113b0569668ff3243b6054c\": rpc error: code = NotFound desc = could not find container \"e1033a5fff1669f552d4cb281b5a5de1171e402d8113b0569668ff3243b6054c\": container with ID starting with e1033a5fff1669f552d4cb281b5a5de1171e402d8113b0569668ff3243b6054c not found: ID does not exist" Oct 03 13:53:58 crc kubenswrapper[4861]: I1003 13:53:58.736003 4861 scope.go:117] "RemoveContainer" containerID="0c7f51cb05fba6f4c3a58ad9231351ac250b63d4b77917f495fd5b86870dda99" Oct 03 13:53:58 crc kubenswrapper[4861]: E1003 13:53:58.736204 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0c7f51cb05fba6f4c3a58ad9231351ac250b63d4b77917f495fd5b86870dda99\": container with ID starting with 0c7f51cb05fba6f4c3a58ad9231351ac250b63d4b77917f495fd5b86870dda99 not found: ID does not exist" containerID="0c7f51cb05fba6f4c3a58ad9231351ac250b63d4b77917f495fd5b86870dda99" Oct 03 13:53:58 crc kubenswrapper[4861]: I1003 13:53:58.736327 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0c7f51cb05fba6f4c3a58ad9231351ac250b63d4b77917f495fd5b86870dda99"} err="failed to get container status \"0c7f51cb05fba6f4c3a58ad9231351ac250b63d4b77917f495fd5b86870dda99\": rpc error: code = NotFound desc = could not find container \"0c7f51cb05fba6f4c3a58ad9231351ac250b63d4b77917f495fd5b86870dda99\": container with ID starting with 0c7f51cb05fba6f4c3a58ad9231351ac250b63d4b77917f495fd5b86870dda99 not found: ID does not exist" Oct 03 13:53:58 crc kubenswrapper[4861]: I1003 13:53:58.922204 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 03 13:53:58 crc kubenswrapper[4861]: I1003 13:53:58.945029 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Oct 03 13:53:58 crc kubenswrapper[4861]: I1003 13:53:58.959615 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 03 13:53:58 crc kubenswrapper[4861]: I1003 13:53:58.973266 4861 
kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 03 13:53:58 crc kubenswrapper[4861]: I1003 13:53:58.991068 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Oct 03 13:53:58 crc kubenswrapper[4861]: E1003 13:53:58.992396 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2d8414db-04a2-4f58-a99f-73d7fd445d5d" containerName="ceilometer-central-agent" Oct 03 13:53:58 crc kubenswrapper[4861]: I1003 13:53:58.992420 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="2d8414db-04a2-4f58-a99f-73d7fd445d5d" containerName="ceilometer-central-agent" Oct 03 13:53:58 crc kubenswrapper[4861]: E1003 13:53:58.992447 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd1efd4a-2b99-445d-b2af-4f129db94e2c" containerName="nova-api-api" Oct 03 13:53:58 crc kubenswrapper[4861]: I1003 13:53:58.992453 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd1efd4a-2b99-445d-b2af-4f129db94e2c" containerName="nova-api-api" Oct 03 13:53:58 crc kubenswrapper[4861]: E1003 13:53:58.992464 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd1efd4a-2b99-445d-b2af-4f129db94e2c" containerName="nova-api-log" Oct 03 13:53:58 crc kubenswrapper[4861]: I1003 13:53:58.992469 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd1efd4a-2b99-445d-b2af-4f129db94e2c" containerName="nova-api-log" Oct 03 13:53:58 crc kubenswrapper[4861]: E1003 13:53:58.992478 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2d8414db-04a2-4f58-a99f-73d7fd445d5d" containerName="proxy-httpd" Oct 03 13:53:58 crc kubenswrapper[4861]: I1003 13:53:58.992486 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="2d8414db-04a2-4f58-a99f-73d7fd445d5d" containerName="proxy-httpd" Oct 03 13:53:58 crc kubenswrapper[4861]: E1003 13:53:58.992498 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2d8414db-04a2-4f58-a99f-73d7fd445d5d" containerName="sg-core" Oct 03 13:53:58 crc kubenswrapper[4861]: I1003 13:53:58.992503 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="2d8414db-04a2-4f58-a99f-73d7fd445d5d" containerName="sg-core" Oct 03 13:53:58 crc kubenswrapper[4861]: E1003 13:53:58.992514 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2d8414db-04a2-4f58-a99f-73d7fd445d5d" containerName="ceilometer-notification-agent" Oct 03 13:53:58 crc kubenswrapper[4861]: I1003 13:53:58.992521 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="2d8414db-04a2-4f58-a99f-73d7fd445d5d" containerName="ceilometer-notification-agent" Oct 03 13:53:58 crc kubenswrapper[4861]: I1003 13:53:58.992680 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="2d8414db-04a2-4f58-a99f-73d7fd445d5d" containerName="proxy-httpd" Oct 03 13:53:58 crc kubenswrapper[4861]: I1003 13:53:58.992697 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="fd1efd4a-2b99-445d-b2af-4f129db94e2c" containerName="nova-api-api" Oct 03 13:53:58 crc kubenswrapper[4861]: I1003 13:53:58.992714 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="2d8414db-04a2-4f58-a99f-73d7fd445d5d" containerName="ceilometer-central-agent" Oct 03 13:53:58 crc kubenswrapper[4861]: I1003 13:53:58.992724 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="2d8414db-04a2-4f58-a99f-73d7fd445d5d" containerName="sg-core" Oct 03 13:53:58 crc kubenswrapper[4861]: I1003 13:53:58.992734 4861 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="fd1efd4a-2b99-445d-b2af-4f129db94e2c" containerName="nova-api-log" Oct 03 13:53:58 crc kubenswrapper[4861]: I1003 13:53:58.992740 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="2d8414db-04a2-4f58-a99f-73d7fd445d5d" containerName="ceilometer-notification-agent" Oct 03 13:53:58 crc kubenswrapper[4861]: I1003 13:53:58.993929 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 03 13:53:58 crc kubenswrapper[4861]: I1003 13:53:58.997537 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Oct 03 13:53:58 crc kubenswrapper[4861]: I1003 13:53:58.997760 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Oct 03 13:53:59 crc kubenswrapper[4861]: I1003 13:53:58.999175 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Oct 03 13:53:59 crc kubenswrapper[4861]: I1003 13:53:59.007300 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 03 13:53:59 crc kubenswrapper[4861]: I1003 13:53:59.015855 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 03 13:53:59 crc kubenswrapper[4861]: I1003 13:53:59.027124 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 03 13:53:59 crc kubenswrapper[4861]: I1003 13:53:59.027242 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 03 13:53:59 crc kubenswrapper[4861]: I1003 13:53:59.031923 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Oct 03 13:53:59 crc kubenswrapper[4861]: I1003 13:53:59.031969 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 03 13:53:59 crc kubenswrapper[4861]: I1003 13:53:59.032177 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 03 13:53:59 crc kubenswrapper[4861]: I1003 13:53:59.094114 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/baaeffdf-d905-4d99-905e-00944c02237b-public-tls-certs\") pod \"nova-api-0\" (UID: \"baaeffdf-d905-4d99-905e-00944c02237b\") " pod="openstack/nova-api-0" Oct 03 13:53:59 crc kubenswrapper[4861]: I1003 13:53:59.094156 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gh7qv\" (UniqueName: \"kubernetes.io/projected/baaeffdf-d905-4d99-905e-00944c02237b-kube-api-access-gh7qv\") pod \"nova-api-0\" (UID: \"baaeffdf-d905-4d99-905e-00944c02237b\") " pod="openstack/nova-api-0" Oct 03 13:53:59 crc kubenswrapper[4861]: I1003 13:53:59.094274 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/baaeffdf-d905-4d99-905e-00944c02237b-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"baaeffdf-d905-4d99-905e-00944c02237b\") " pod="openstack/nova-api-0" Oct 03 13:53:59 crc kubenswrapper[4861]: I1003 13:53:59.094294 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/baaeffdf-d905-4d99-905e-00944c02237b-internal-tls-certs\") pod \"nova-api-0\" (UID: \"baaeffdf-d905-4d99-905e-00944c02237b\") " 
pod="openstack/nova-api-0" Oct 03 13:53:59 crc kubenswrapper[4861]: I1003 13:53:59.094350 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/baaeffdf-d905-4d99-905e-00944c02237b-logs\") pod \"nova-api-0\" (UID: \"baaeffdf-d905-4d99-905e-00944c02237b\") " pod="openstack/nova-api-0" Oct 03 13:53:59 crc kubenswrapper[4861]: I1003 13:53:59.094378 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/baaeffdf-d905-4d99-905e-00944c02237b-config-data\") pod \"nova-api-0\" (UID: \"baaeffdf-d905-4d99-905e-00944c02237b\") " pod="openstack/nova-api-0" Oct 03 13:53:59 crc kubenswrapper[4861]: I1003 13:53:59.196292 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/baaeffdf-d905-4d99-905e-00944c02237b-logs\") pod \"nova-api-0\" (UID: \"baaeffdf-d905-4d99-905e-00944c02237b\") " pod="openstack/nova-api-0" Oct 03 13:53:59 crc kubenswrapper[4861]: I1003 13:53:59.196337 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/14629b29-a838-444c-9acf-42c14b7dbe5a-log-httpd\") pod \"ceilometer-0\" (UID: \"14629b29-a838-444c-9acf-42c14b7dbe5a\") " pod="openstack/ceilometer-0" Oct 03 13:53:59 crc kubenswrapper[4861]: I1003 13:53:59.196384 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/baaeffdf-d905-4d99-905e-00944c02237b-config-data\") pod \"nova-api-0\" (UID: \"baaeffdf-d905-4d99-905e-00944c02237b\") " pod="openstack/nova-api-0" Oct 03 13:53:59 crc kubenswrapper[4861]: I1003 13:53:59.196447 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/14629b29-a838-444c-9acf-42c14b7dbe5a-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"14629b29-a838-444c-9acf-42c14b7dbe5a\") " pod="openstack/ceilometer-0" Oct 03 13:53:59 crc kubenswrapper[4861]: I1003 13:53:59.196465 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/14629b29-a838-444c-9acf-42c14b7dbe5a-run-httpd\") pod \"ceilometer-0\" (UID: \"14629b29-a838-444c-9acf-42c14b7dbe5a\") " pod="openstack/ceilometer-0" Oct 03 13:53:59 crc kubenswrapper[4861]: I1003 13:53:59.196485 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/14629b29-a838-444c-9acf-42c14b7dbe5a-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"14629b29-a838-444c-9acf-42c14b7dbe5a\") " pod="openstack/ceilometer-0" Oct 03 13:53:59 crc kubenswrapper[4861]: I1003 13:53:59.196504 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/baaeffdf-d905-4d99-905e-00944c02237b-public-tls-certs\") pod \"nova-api-0\" (UID: \"baaeffdf-d905-4d99-905e-00944c02237b\") " pod="openstack/nova-api-0" Oct 03 13:53:59 crc kubenswrapper[4861]: I1003 13:53:59.196527 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gh7qv\" (UniqueName: \"kubernetes.io/projected/baaeffdf-d905-4d99-905e-00944c02237b-kube-api-access-gh7qv\") pod 
\"nova-api-0\" (UID: \"baaeffdf-d905-4d99-905e-00944c02237b\") " pod="openstack/nova-api-0" Oct 03 13:53:59 crc kubenswrapper[4861]: I1003 13:53:59.196556 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14629b29-a838-444c-9acf-42c14b7dbe5a-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"14629b29-a838-444c-9acf-42c14b7dbe5a\") " pod="openstack/ceilometer-0" Oct 03 13:53:59 crc kubenswrapper[4861]: I1003 13:53:59.196585 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z77d7\" (UniqueName: \"kubernetes.io/projected/14629b29-a838-444c-9acf-42c14b7dbe5a-kube-api-access-z77d7\") pod \"ceilometer-0\" (UID: \"14629b29-a838-444c-9acf-42c14b7dbe5a\") " pod="openstack/ceilometer-0" Oct 03 13:53:59 crc kubenswrapper[4861]: I1003 13:53:59.196601 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/baaeffdf-d905-4d99-905e-00944c02237b-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"baaeffdf-d905-4d99-905e-00944c02237b\") " pod="openstack/nova-api-0" Oct 03 13:53:59 crc kubenswrapper[4861]: I1003 13:53:59.196614 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/14629b29-a838-444c-9acf-42c14b7dbe5a-scripts\") pod \"ceilometer-0\" (UID: \"14629b29-a838-444c-9acf-42c14b7dbe5a\") " pod="openstack/ceilometer-0" Oct 03 13:53:59 crc kubenswrapper[4861]: I1003 13:53:59.196630 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/baaeffdf-d905-4d99-905e-00944c02237b-internal-tls-certs\") pod \"nova-api-0\" (UID: \"baaeffdf-d905-4d99-905e-00944c02237b\") " pod="openstack/nova-api-0" Oct 03 13:53:59 crc kubenswrapper[4861]: I1003 13:53:59.196651 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/14629b29-a838-444c-9acf-42c14b7dbe5a-config-data\") pod \"ceilometer-0\" (UID: \"14629b29-a838-444c-9acf-42c14b7dbe5a\") " pod="openstack/ceilometer-0" Oct 03 13:53:59 crc kubenswrapper[4861]: I1003 13:53:59.196861 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/baaeffdf-d905-4d99-905e-00944c02237b-logs\") pod \"nova-api-0\" (UID: \"baaeffdf-d905-4d99-905e-00944c02237b\") " pod="openstack/nova-api-0" Oct 03 13:53:59 crc kubenswrapper[4861]: I1003 13:53:59.200788 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/baaeffdf-d905-4d99-905e-00944c02237b-internal-tls-certs\") pod \"nova-api-0\" (UID: \"baaeffdf-d905-4d99-905e-00944c02237b\") " pod="openstack/nova-api-0" Oct 03 13:53:59 crc kubenswrapper[4861]: I1003 13:53:59.200867 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/baaeffdf-d905-4d99-905e-00944c02237b-public-tls-certs\") pod \"nova-api-0\" (UID: \"baaeffdf-d905-4d99-905e-00944c02237b\") " pod="openstack/nova-api-0" Oct 03 13:53:59 crc kubenswrapper[4861]: I1003 13:53:59.201252 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/baaeffdf-d905-4d99-905e-00944c02237b-config-data\") pod \"nova-api-0\" (UID: \"baaeffdf-d905-4d99-905e-00944c02237b\") " pod="openstack/nova-api-0" Oct 03 13:53:59 crc kubenswrapper[4861]: I1003 13:53:59.215598 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/baaeffdf-d905-4d99-905e-00944c02237b-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"baaeffdf-d905-4d99-905e-00944c02237b\") " pod="openstack/nova-api-0" Oct 03 13:53:59 crc kubenswrapper[4861]: I1003 13:53:59.218208 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gh7qv\" (UniqueName: \"kubernetes.io/projected/baaeffdf-d905-4d99-905e-00944c02237b-kube-api-access-gh7qv\") pod \"nova-api-0\" (UID: \"baaeffdf-d905-4d99-905e-00944c02237b\") " pod="openstack/nova-api-0" Oct 03 13:53:59 crc kubenswrapper[4861]: I1003 13:53:59.298516 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14629b29-a838-444c-9acf-42c14b7dbe5a-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"14629b29-a838-444c-9acf-42c14b7dbe5a\") " pod="openstack/ceilometer-0" Oct 03 13:53:59 crc kubenswrapper[4861]: I1003 13:53:59.298582 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z77d7\" (UniqueName: \"kubernetes.io/projected/14629b29-a838-444c-9acf-42c14b7dbe5a-kube-api-access-z77d7\") pod \"ceilometer-0\" (UID: \"14629b29-a838-444c-9acf-42c14b7dbe5a\") " pod="openstack/ceilometer-0" Oct 03 13:53:59 crc kubenswrapper[4861]: I1003 13:53:59.298605 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/14629b29-a838-444c-9acf-42c14b7dbe5a-scripts\") pod \"ceilometer-0\" (UID: \"14629b29-a838-444c-9acf-42c14b7dbe5a\") " pod="openstack/ceilometer-0" Oct 03 13:53:59 crc kubenswrapper[4861]: I1003 13:53:59.298631 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/14629b29-a838-444c-9acf-42c14b7dbe5a-config-data\") pod \"ceilometer-0\" (UID: \"14629b29-a838-444c-9acf-42c14b7dbe5a\") " pod="openstack/ceilometer-0" Oct 03 13:53:59 crc kubenswrapper[4861]: I1003 13:53:59.298672 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/14629b29-a838-444c-9acf-42c14b7dbe5a-log-httpd\") pod \"ceilometer-0\" (UID: \"14629b29-a838-444c-9acf-42c14b7dbe5a\") " pod="openstack/ceilometer-0" Oct 03 13:53:59 crc kubenswrapper[4861]: I1003 13:53:59.298738 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/14629b29-a838-444c-9acf-42c14b7dbe5a-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"14629b29-a838-444c-9acf-42c14b7dbe5a\") " pod="openstack/ceilometer-0" Oct 03 13:53:59 crc kubenswrapper[4861]: I1003 13:53:59.298758 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/14629b29-a838-444c-9acf-42c14b7dbe5a-run-httpd\") pod \"ceilometer-0\" (UID: \"14629b29-a838-444c-9acf-42c14b7dbe5a\") " pod="openstack/ceilometer-0" Oct 03 13:53:59 crc kubenswrapper[4861]: I1003 13:53:59.298776 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: 
\"kubernetes.io/secret/14629b29-a838-444c-9acf-42c14b7dbe5a-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"14629b29-a838-444c-9acf-42c14b7dbe5a\") " pod="openstack/ceilometer-0" Oct 03 13:53:59 crc kubenswrapper[4861]: I1003 13:53:59.299270 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/14629b29-a838-444c-9acf-42c14b7dbe5a-log-httpd\") pod \"ceilometer-0\" (UID: \"14629b29-a838-444c-9acf-42c14b7dbe5a\") " pod="openstack/ceilometer-0" Oct 03 13:53:59 crc kubenswrapper[4861]: I1003 13:53:59.299762 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/14629b29-a838-444c-9acf-42c14b7dbe5a-run-httpd\") pod \"ceilometer-0\" (UID: \"14629b29-a838-444c-9acf-42c14b7dbe5a\") " pod="openstack/ceilometer-0" Oct 03 13:53:59 crc kubenswrapper[4861]: I1003 13:53:59.302694 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/14629b29-a838-444c-9acf-42c14b7dbe5a-scripts\") pod \"ceilometer-0\" (UID: \"14629b29-a838-444c-9acf-42c14b7dbe5a\") " pod="openstack/ceilometer-0" Oct 03 13:53:59 crc kubenswrapper[4861]: I1003 13:53:59.303471 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/14629b29-a838-444c-9acf-42c14b7dbe5a-config-data\") pod \"ceilometer-0\" (UID: \"14629b29-a838-444c-9acf-42c14b7dbe5a\") " pod="openstack/ceilometer-0" Oct 03 13:53:59 crc kubenswrapper[4861]: I1003 13:53:59.303610 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14629b29-a838-444c-9acf-42c14b7dbe5a-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"14629b29-a838-444c-9acf-42c14b7dbe5a\") " pod="openstack/ceilometer-0" Oct 03 13:53:59 crc kubenswrapper[4861]: I1003 13:53:59.303889 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/14629b29-a838-444c-9acf-42c14b7dbe5a-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"14629b29-a838-444c-9acf-42c14b7dbe5a\") " pod="openstack/ceilometer-0" Oct 03 13:53:59 crc kubenswrapper[4861]: I1003 13:53:59.318588 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/14629b29-a838-444c-9acf-42c14b7dbe5a-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"14629b29-a838-444c-9acf-42c14b7dbe5a\") " pod="openstack/ceilometer-0" Oct 03 13:53:59 crc kubenswrapper[4861]: I1003 13:53:59.318738 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z77d7\" (UniqueName: \"kubernetes.io/projected/14629b29-a838-444c-9acf-42c14b7dbe5a-kube-api-access-z77d7\") pod \"ceilometer-0\" (UID: \"14629b29-a838-444c-9acf-42c14b7dbe5a\") " pod="openstack/ceilometer-0" Oct 03 13:53:59 crc kubenswrapper[4861]: I1003 13:53:59.339907 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 03 13:53:59 crc kubenswrapper[4861]: I1003 13:53:59.373706 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 03 13:53:59 crc kubenswrapper[4861]: I1003 13:53:59.902741 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 03 13:53:59 crc kubenswrapper[4861]: I1003 13:53:59.981206 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 03 13:53:59 crc kubenswrapper[4861]: W1003 13:53:59.988460 4861 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod14629b29_a838_444c_9acf_42c14b7dbe5a.slice/crio-4f109cb18dcae43faa235319009afccba778b75280951a9050da3c73efe76048 WatchSource:0}: Error finding container 4f109cb18dcae43faa235319009afccba778b75280951a9050da3c73efe76048: Status 404 returned error can't find the container with id 4f109cb18dcae43faa235319009afccba778b75280951a9050da3c73efe76048 Oct 03 13:54:00 crc kubenswrapper[4861]: I1003 13:54:00.145302 4861 patch_prober.go:28] interesting pod/machine-config-daemon-t9slw container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 13:54:00 crc kubenswrapper[4861]: I1003 13:54:00.145353 4861 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 13:54:00 crc kubenswrapper[4861]: I1003 13:54:00.145392 4861 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" Oct 03 13:54:00 crc kubenswrapper[4861]: I1003 13:54:00.146126 4861 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"13a0d25a9a90da6fc94ead2cdfeed1d7dc6194708455cf7dc135deca83d68d28"} pod="openshift-machine-config-operator/machine-config-daemon-t9slw" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 03 13:54:00 crc kubenswrapper[4861]: I1003 13:54:00.146175 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" containerName="machine-config-daemon" containerID="cri-o://13a0d25a9a90da6fc94ead2cdfeed1d7dc6194708455cf7dc135deca83d68d28" gracePeriod=600 Oct 03 13:54:00 crc kubenswrapper[4861]: I1003 13:54:00.629005 4861 generic.go:334] "Generic (PLEG): container finished" podID="d8335d3f-417e-4114-b306-a3d8f6c31348" containerID="13a0d25a9a90da6fc94ead2cdfeed1d7dc6194708455cf7dc135deca83d68d28" exitCode=0 Oct 03 13:54:00 crc kubenswrapper[4861]: I1003 13:54:00.629199 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" event={"ID":"d8335d3f-417e-4114-b306-a3d8f6c31348","Type":"ContainerDied","Data":"13a0d25a9a90da6fc94ead2cdfeed1d7dc6194708455cf7dc135deca83d68d28"} Oct 03 13:54:00 crc kubenswrapper[4861]: I1003 13:54:00.629453 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" 
event={"ID":"d8335d3f-417e-4114-b306-a3d8f6c31348","Type":"ContainerStarted","Data":"c1e256e5753a41524e35adb39d95dd19fa1e46e14c50fc9dfd4aef05090aab1c"} Oct 03 13:54:00 crc kubenswrapper[4861]: I1003 13:54:00.629479 4861 scope.go:117] "RemoveContainer" containerID="7c374cec0027a71985e4c5ed0abe80567ca6f2e53b91f0c5eb7af2198510c7d3" Oct 03 13:54:00 crc kubenswrapper[4861]: I1003 13:54:00.632240 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"14629b29-a838-444c-9acf-42c14b7dbe5a","Type":"ContainerStarted","Data":"4f109cb18dcae43faa235319009afccba778b75280951a9050da3c73efe76048"} Oct 03 13:54:00 crc kubenswrapper[4861]: I1003 13:54:00.634266 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"baaeffdf-d905-4d99-905e-00944c02237b","Type":"ContainerStarted","Data":"9f968a5aff9f9669f2fb38b98aa20015ff77140eded634f8ae69366ebce6cb8f"} Oct 03 13:54:00 crc kubenswrapper[4861]: I1003 13:54:00.634425 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"baaeffdf-d905-4d99-905e-00944c02237b","Type":"ContainerStarted","Data":"4ac1345636d9916e8f3a75f8f6f3fdd485ec93b3cb12dfdafb0f71e8b61b07ec"} Oct 03 13:54:00 crc kubenswrapper[4861]: I1003 13:54:00.634517 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"baaeffdf-d905-4d99-905e-00944c02237b","Type":"ContainerStarted","Data":"d61dd65263fa80ca813eddfac5a7b728b1b9d1605ad151182e619b821c38b476"} Oct 03 13:54:00 crc kubenswrapper[4861]: I1003 13:54:00.696636 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.6966223080000002 podStartE2EDuration="2.696622308s" podCreationTimestamp="2025-10-03 13:53:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:54:00.695094828 +0000 UTC m=+1354.693079875" watchObservedRunningTime="2025-10-03 13:54:00.696622308 +0000 UTC m=+1354.694607355" Oct 03 13:54:00 crc kubenswrapper[4861]: I1003 13:54:00.704610 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2d8414db-04a2-4f58-a99f-73d7fd445d5d" path="/var/lib/kubelet/pods/2d8414db-04a2-4f58-a99f-73d7fd445d5d/volumes" Oct 03 13:54:00 crc kubenswrapper[4861]: I1003 13:54:00.705501 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fd1efd4a-2b99-445d-b2af-4f129db94e2c" path="/var/lib/kubelet/pods/fd1efd4a-2b99-445d-b2af-4f129db94e2c/volumes" Oct 03 13:54:00 crc kubenswrapper[4861]: I1003 13:54:00.762882 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Oct 03 13:54:00 crc kubenswrapper[4861]: I1003 13:54:00.782787 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Oct 03 13:54:01 crc kubenswrapper[4861]: I1003 13:54:01.645973 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"14629b29-a838-444c-9acf-42c14b7dbe5a","Type":"ContainerStarted","Data":"e49ba88dcfb0d0d2536744db651495c6f724fd89e2c0ee78e28b06f2e78302e6"} Oct 03 13:54:01 crc kubenswrapper[4861]: I1003 13:54:01.646544 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"14629b29-a838-444c-9acf-42c14b7dbe5a","Type":"ContainerStarted","Data":"7ee088c52861902c0684f74a702865fc0ca71346b6319375cbf6c1600c369745"} Oct 03 13:54:01 crc 
kubenswrapper[4861]: I1003 13:54:01.663219 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Oct 03 13:54:01 crc kubenswrapper[4861]: I1003 13:54:01.830328 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-867t8"] Oct 03 13:54:01 crc kubenswrapper[4861]: I1003 13:54:01.831759 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-867t8" Oct 03 13:54:01 crc kubenswrapper[4861]: I1003 13:54:01.835986 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts" Oct 03 13:54:01 crc kubenswrapper[4861]: I1003 13:54:01.835992 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data" Oct 03 13:54:01 crc kubenswrapper[4861]: I1003 13:54:01.849925 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-867t8"] Oct 03 13:54:01 crc kubenswrapper[4861]: I1003 13:54:01.955557 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7c9c017-a9df-4899-81ec-1fc7181f2414-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-867t8\" (UID: \"d7c9c017-a9df-4899-81ec-1fc7181f2414\") " pod="openstack/nova-cell1-cell-mapping-867t8" Oct 03 13:54:01 crc kubenswrapper[4861]: I1003 13:54:01.955656 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d7c9c017-a9df-4899-81ec-1fc7181f2414-scripts\") pod \"nova-cell1-cell-mapping-867t8\" (UID: \"d7c9c017-a9df-4899-81ec-1fc7181f2414\") " pod="openstack/nova-cell1-cell-mapping-867t8" Oct 03 13:54:01 crc kubenswrapper[4861]: I1003 13:54:01.955745 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cvcc7\" (UniqueName: \"kubernetes.io/projected/d7c9c017-a9df-4899-81ec-1fc7181f2414-kube-api-access-cvcc7\") pod \"nova-cell1-cell-mapping-867t8\" (UID: \"d7c9c017-a9df-4899-81ec-1fc7181f2414\") " pod="openstack/nova-cell1-cell-mapping-867t8" Oct 03 13:54:01 crc kubenswrapper[4861]: I1003 13:54:01.955776 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d7c9c017-a9df-4899-81ec-1fc7181f2414-config-data\") pod \"nova-cell1-cell-mapping-867t8\" (UID: \"d7c9c017-a9df-4899-81ec-1fc7181f2414\") " pod="openstack/nova-cell1-cell-mapping-867t8" Oct 03 13:54:02 crc kubenswrapper[4861]: I1003 13:54:02.058998 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7c9c017-a9df-4899-81ec-1fc7181f2414-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-867t8\" (UID: \"d7c9c017-a9df-4899-81ec-1fc7181f2414\") " pod="openstack/nova-cell1-cell-mapping-867t8" Oct 03 13:54:02 crc kubenswrapper[4861]: I1003 13:54:02.059099 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d7c9c017-a9df-4899-81ec-1fc7181f2414-scripts\") pod \"nova-cell1-cell-mapping-867t8\" (UID: \"d7c9c017-a9df-4899-81ec-1fc7181f2414\") " pod="openstack/nova-cell1-cell-mapping-867t8" Oct 03 13:54:02 crc kubenswrapper[4861]: I1003 13:54:02.059188 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"kube-api-access-cvcc7\" (UniqueName: \"kubernetes.io/projected/d7c9c017-a9df-4899-81ec-1fc7181f2414-kube-api-access-cvcc7\") pod \"nova-cell1-cell-mapping-867t8\" (UID: \"d7c9c017-a9df-4899-81ec-1fc7181f2414\") " pod="openstack/nova-cell1-cell-mapping-867t8" Oct 03 13:54:02 crc kubenswrapper[4861]: I1003 13:54:02.059221 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d7c9c017-a9df-4899-81ec-1fc7181f2414-config-data\") pod \"nova-cell1-cell-mapping-867t8\" (UID: \"d7c9c017-a9df-4899-81ec-1fc7181f2414\") " pod="openstack/nova-cell1-cell-mapping-867t8" Oct 03 13:54:02 crc kubenswrapper[4861]: I1003 13:54:02.065094 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d7c9c017-a9df-4899-81ec-1fc7181f2414-scripts\") pod \"nova-cell1-cell-mapping-867t8\" (UID: \"d7c9c017-a9df-4899-81ec-1fc7181f2414\") " pod="openstack/nova-cell1-cell-mapping-867t8" Oct 03 13:54:02 crc kubenswrapper[4861]: I1003 13:54:02.065419 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d7c9c017-a9df-4899-81ec-1fc7181f2414-config-data\") pod \"nova-cell1-cell-mapping-867t8\" (UID: \"d7c9c017-a9df-4899-81ec-1fc7181f2414\") " pod="openstack/nova-cell1-cell-mapping-867t8" Oct 03 13:54:02 crc kubenswrapper[4861]: I1003 13:54:02.066896 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7c9c017-a9df-4899-81ec-1fc7181f2414-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-867t8\" (UID: \"d7c9c017-a9df-4899-81ec-1fc7181f2414\") " pod="openstack/nova-cell1-cell-mapping-867t8" Oct 03 13:54:02 crc kubenswrapper[4861]: I1003 13:54:02.080514 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cvcc7\" (UniqueName: \"kubernetes.io/projected/d7c9c017-a9df-4899-81ec-1fc7181f2414-kube-api-access-cvcc7\") pod \"nova-cell1-cell-mapping-867t8\" (UID: \"d7c9c017-a9df-4899-81ec-1fc7181f2414\") " pod="openstack/nova-cell1-cell-mapping-867t8" Oct 03 13:54:02 crc kubenswrapper[4861]: I1003 13:54:02.156390 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-867t8" Oct 03 13:54:02 crc kubenswrapper[4861]: I1003 13:54:02.329520 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-59cf4bdb65-d4jfw" Oct 03 13:54:02 crc kubenswrapper[4861]: I1003 13:54:02.411544 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-845d6d6f59-zlqcc"] Oct 03 13:54:02 crc kubenswrapper[4861]: I1003 13:54:02.411815 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-845d6d6f59-zlqcc" podUID="db0287ff-e926-428b-a4e0-4dd1e3b40b66" containerName="dnsmasq-dns" containerID="cri-o://6f940b57e91d29c6d4b325f3701d47acbf9d0f02f8d006df7ac7a1639566303c" gracePeriod=10 Oct 03 13:54:03 crc kubenswrapper[4861]: I1003 13:54:02.526550 4861 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-845d6d6f59-zlqcc" podUID="db0287ff-e926-428b-a4e0-4dd1e3b40b66" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.186:5353: connect: connection refused" Oct 03 13:54:03 crc kubenswrapper[4861]: I1003 13:54:02.679140 4861 generic.go:334] "Generic (PLEG): container finished" podID="db0287ff-e926-428b-a4e0-4dd1e3b40b66" containerID="6f940b57e91d29c6d4b325f3701d47acbf9d0f02f8d006df7ac7a1639566303c" exitCode=0 Oct 03 13:54:03 crc kubenswrapper[4861]: I1003 13:54:02.679266 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-845d6d6f59-zlqcc" event={"ID":"db0287ff-e926-428b-a4e0-4dd1e3b40b66","Type":"ContainerDied","Data":"6f940b57e91d29c6d4b325f3701d47acbf9d0f02f8d006df7ac7a1639566303c"} Oct 03 13:54:03 crc kubenswrapper[4861]: I1003 13:54:02.708576 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"14629b29-a838-444c-9acf-42c14b7dbe5a","Type":"ContainerStarted","Data":"e5f8624eae701faf71125837f263fc21a1a994e362b54c2779709f9c8fd69f5f"} Oct 03 13:54:03 crc kubenswrapper[4861]: I1003 13:54:02.729258 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-867t8"] Oct 03 13:54:03 crc kubenswrapper[4861]: W1003 13:54:02.751396 4861 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd7c9c017_a9df_4899_81ec_1fc7181f2414.slice/crio-5726b96558232a57e8a11171e2f092337ced9658f866b56e5992bdbc79f4a4a0 WatchSource:0}: Error finding container 5726b96558232a57e8a11171e2f092337ced9658f866b56e5992bdbc79f4a4a0: Status 404 returned error can't find the container with id 5726b96558232a57e8a11171e2f092337ced9658f866b56e5992bdbc79f4a4a0 Oct 03 13:54:03 crc kubenswrapper[4861]: I1003 13:54:03.804021 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-867t8" event={"ID":"d7c9c017-a9df-4899-81ec-1fc7181f2414","Type":"ContainerStarted","Data":"8ce9f374c14ab77ff7df53e963f5059ca15b3e8d1d02dbbbb6b06f2df3fbac1e"} Oct 03 13:54:03 crc kubenswrapper[4861]: I1003 13:54:03.806246 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-867t8" event={"ID":"d7c9c017-a9df-4899-81ec-1fc7181f2414","Type":"ContainerStarted","Data":"5726b96558232a57e8a11171e2f092337ced9658f866b56e5992bdbc79f4a4a0"} Oct 03 13:54:03 crc kubenswrapper[4861]: I1003 13:54:03.811454 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-845d6d6f59-zlqcc" Oct 03 13:54:03 crc kubenswrapper[4861]: I1003 13:54:03.811596 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-845d6d6f59-zlqcc" event={"ID":"db0287ff-e926-428b-a4e0-4dd1e3b40b66","Type":"ContainerDied","Data":"5c9c495a1859c49850187d9c60ea79a2ef821e94017b7f47088c828c3f0acef6"} Oct 03 13:54:03 crc kubenswrapper[4861]: I1003 13:54:03.811629 4861 scope.go:117] "RemoveContainer" containerID="6f940b57e91d29c6d4b325f3701d47acbf9d0f02f8d006df7ac7a1639566303c" Oct 03 13:54:03 crc kubenswrapper[4861]: I1003 13:54:03.825636 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-867t8" podStartSLOduration=2.825620853 podStartE2EDuration="2.825620853s" podCreationTimestamp="2025-10-03 13:54:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:54:03.82550556 +0000 UTC m=+1357.823490607" watchObservedRunningTime="2025-10-03 13:54:03.825620853 +0000 UTC m=+1357.823605890" Oct 03 13:54:03 crc kubenswrapper[4861]: I1003 13:54:03.829721 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/db0287ff-e926-428b-a4e0-4dd1e3b40b66-ovsdbserver-nb\") pod \"db0287ff-e926-428b-a4e0-4dd1e3b40b66\" (UID: \"db0287ff-e926-428b-a4e0-4dd1e3b40b66\") " Oct 03 13:54:03 crc kubenswrapper[4861]: I1003 13:54:03.829824 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/db0287ff-e926-428b-a4e0-4dd1e3b40b66-ovsdbserver-sb\") pod \"db0287ff-e926-428b-a4e0-4dd1e3b40b66\" (UID: \"db0287ff-e926-428b-a4e0-4dd1e3b40b66\") " Oct 03 13:54:03 crc kubenswrapper[4861]: I1003 13:54:03.829857 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sl6vd\" (UniqueName: \"kubernetes.io/projected/db0287ff-e926-428b-a4e0-4dd1e3b40b66-kube-api-access-sl6vd\") pod \"db0287ff-e926-428b-a4e0-4dd1e3b40b66\" (UID: \"db0287ff-e926-428b-a4e0-4dd1e3b40b66\") " Oct 03 13:54:03 crc kubenswrapper[4861]: I1003 13:54:03.829916 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/db0287ff-e926-428b-a4e0-4dd1e3b40b66-config\") pod \"db0287ff-e926-428b-a4e0-4dd1e3b40b66\" (UID: \"db0287ff-e926-428b-a4e0-4dd1e3b40b66\") " Oct 03 13:54:03 crc kubenswrapper[4861]: I1003 13:54:03.830012 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/db0287ff-e926-428b-a4e0-4dd1e3b40b66-dns-svc\") pod \"db0287ff-e926-428b-a4e0-4dd1e3b40b66\" (UID: \"db0287ff-e926-428b-a4e0-4dd1e3b40b66\") " Oct 03 13:54:03 crc kubenswrapper[4861]: I1003 13:54:03.830075 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/db0287ff-e926-428b-a4e0-4dd1e3b40b66-dns-swift-storage-0\") pod \"db0287ff-e926-428b-a4e0-4dd1e3b40b66\" (UID: \"db0287ff-e926-428b-a4e0-4dd1e3b40b66\") " Oct 03 13:54:03 crc kubenswrapper[4861]: I1003 13:54:03.858342 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/db0287ff-e926-428b-a4e0-4dd1e3b40b66-kube-api-access-sl6vd" (OuterVolumeSpecName: "kube-api-access-sl6vd") pod 
"db0287ff-e926-428b-a4e0-4dd1e3b40b66" (UID: "db0287ff-e926-428b-a4e0-4dd1e3b40b66"). InnerVolumeSpecName "kube-api-access-sl6vd". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:54:03 crc kubenswrapper[4861]: I1003 13:54:03.867401 4861 scope.go:117] "RemoveContainer" containerID="f3e2aa3585fa89ee119400a031ddc6fdd92cd4c4f4bf88bf7db0082c7bdf1f7a" Oct 03 13:54:03 crc kubenswrapper[4861]: I1003 13:54:03.935028 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sl6vd\" (UniqueName: \"kubernetes.io/projected/db0287ff-e926-428b-a4e0-4dd1e3b40b66-kube-api-access-sl6vd\") on node \"crc\" DevicePath \"\"" Oct 03 13:54:03 crc kubenswrapper[4861]: I1003 13:54:03.990488 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/db0287ff-e926-428b-a4e0-4dd1e3b40b66-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "db0287ff-e926-428b-a4e0-4dd1e3b40b66" (UID: "db0287ff-e926-428b-a4e0-4dd1e3b40b66"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:54:04 crc kubenswrapper[4861]: I1003 13:54:04.000081 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/db0287ff-e926-428b-a4e0-4dd1e3b40b66-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "db0287ff-e926-428b-a4e0-4dd1e3b40b66" (UID: "db0287ff-e926-428b-a4e0-4dd1e3b40b66"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:54:04 crc kubenswrapper[4861]: I1003 13:54:04.013701 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/db0287ff-e926-428b-a4e0-4dd1e3b40b66-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "db0287ff-e926-428b-a4e0-4dd1e3b40b66" (UID: "db0287ff-e926-428b-a4e0-4dd1e3b40b66"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:54:04 crc kubenswrapper[4861]: I1003 13:54:04.033650 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/db0287ff-e926-428b-a4e0-4dd1e3b40b66-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "db0287ff-e926-428b-a4e0-4dd1e3b40b66" (UID: "db0287ff-e926-428b-a4e0-4dd1e3b40b66"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:54:04 crc kubenswrapper[4861]: I1003 13:54:04.037520 4861 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/db0287ff-e926-428b-a4e0-4dd1e3b40b66-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 03 13:54:04 crc kubenswrapper[4861]: I1003 13:54:04.037547 4861 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/db0287ff-e926-428b-a4e0-4dd1e3b40b66-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 03 13:54:04 crc kubenswrapper[4861]: I1003 13:54:04.037561 4861 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/db0287ff-e926-428b-a4e0-4dd1e3b40b66-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 03 13:54:04 crc kubenswrapper[4861]: I1003 13:54:04.037571 4861 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/db0287ff-e926-428b-a4e0-4dd1e3b40b66-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 03 13:54:04 crc kubenswrapper[4861]: I1003 13:54:04.041723 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/db0287ff-e926-428b-a4e0-4dd1e3b40b66-config" (OuterVolumeSpecName: "config") pod "db0287ff-e926-428b-a4e0-4dd1e3b40b66" (UID: "db0287ff-e926-428b-a4e0-4dd1e3b40b66"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:54:04 crc kubenswrapper[4861]: I1003 13:54:04.138406 4861 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/db0287ff-e926-428b-a4e0-4dd1e3b40b66-config\") on node \"crc\" DevicePath \"\"" Oct 03 13:54:04 crc kubenswrapper[4861]: I1003 13:54:04.822049 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"14629b29-a838-444c-9acf-42c14b7dbe5a","Type":"ContainerStarted","Data":"96c063650b29dd92e7ad632d6783fd1c04ab9c4a9d0baf2642f51267d58b8dc2"} Oct 03 13:54:04 crc kubenswrapper[4861]: I1003 13:54:04.822312 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 03 13:54:04 crc kubenswrapper[4861]: I1003 13:54:04.823458 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-845d6d6f59-zlqcc" Oct 03 13:54:04 crc kubenswrapper[4861]: I1003 13:54:04.851868 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.9773130070000002 podStartE2EDuration="6.851847806s" podCreationTimestamp="2025-10-03 13:53:58 +0000 UTC" firstStartedPulling="2025-10-03 13:53:59.992497582 +0000 UTC m=+1353.990482629" lastFinishedPulling="2025-10-03 13:54:03.867032381 +0000 UTC m=+1357.865017428" observedRunningTime="2025-10-03 13:54:04.844767587 +0000 UTC m=+1358.842752634" watchObservedRunningTime="2025-10-03 13:54:04.851847806 +0000 UTC m=+1358.849832853" Oct 03 13:54:04 crc kubenswrapper[4861]: I1003 13:54:04.876419 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-845d6d6f59-zlqcc"] Oct 03 13:54:04 crc kubenswrapper[4861]: I1003 13:54:04.885546 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-845d6d6f59-zlqcc"] Oct 03 13:54:06 crc kubenswrapper[4861]: I1003 13:54:06.691896 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="db0287ff-e926-428b-a4e0-4dd1e3b40b66" path="/var/lib/kubelet/pods/db0287ff-e926-428b-a4e0-4dd1e3b40b66/volumes" Oct 03 13:54:08 crc kubenswrapper[4861]: I1003 13:54:08.886612 4861 generic.go:334] "Generic (PLEG): container finished" podID="d7c9c017-a9df-4899-81ec-1fc7181f2414" containerID="8ce9f374c14ab77ff7df53e963f5059ca15b3e8d1d02dbbbb6b06f2df3fbac1e" exitCode=0 Oct 03 13:54:08 crc kubenswrapper[4861]: I1003 13:54:08.886686 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-867t8" event={"ID":"d7c9c017-a9df-4899-81ec-1fc7181f2414","Type":"ContainerDied","Data":"8ce9f374c14ab77ff7df53e963f5059ca15b3e8d1d02dbbbb6b06f2df3fbac1e"} Oct 03 13:54:09 crc kubenswrapper[4861]: I1003 13:54:09.340493 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 03 13:54:09 crc kubenswrapper[4861]: I1003 13:54:09.340543 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 03 13:54:10 crc kubenswrapper[4861]: I1003 13:54:10.313073 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-867t8" Oct 03 13:54:10 crc kubenswrapper[4861]: I1003 13:54:10.355476 4861 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="baaeffdf-d905-4d99-905e-00944c02237b" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.198:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 03 13:54:10 crc kubenswrapper[4861]: I1003 13:54:10.355819 4861 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="baaeffdf-d905-4d99-905e-00944c02237b" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.198:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 03 13:54:10 crc kubenswrapper[4861]: I1003 13:54:10.456360 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7c9c017-a9df-4899-81ec-1fc7181f2414-combined-ca-bundle\") pod \"d7c9c017-a9df-4899-81ec-1fc7181f2414\" (UID: \"d7c9c017-a9df-4899-81ec-1fc7181f2414\") " Oct 03 13:54:10 crc kubenswrapper[4861]: I1003 13:54:10.456514 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d7c9c017-a9df-4899-81ec-1fc7181f2414-scripts\") pod \"d7c9c017-a9df-4899-81ec-1fc7181f2414\" (UID: \"d7c9c017-a9df-4899-81ec-1fc7181f2414\") " Oct 03 13:54:10 crc kubenswrapper[4861]: I1003 13:54:10.456605 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cvcc7\" (UniqueName: \"kubernetes.io/projected/d7c9c017-a9df-4899-81ec-1fc7181f2414-kube-api-access-cvcc7\") pod \"d7c9c017-a9df-4899-81ec-1fc7181f2414\" (UID: \"d7c9c017-a9df-4899-81ec-1fc7181f2414\") " Oct 03 13:54:10 crc kubenswrapper[4861]: I1003 13:54:10.456667 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d7c9c017-a9df-4899-81ec-1fc7181f2414-config-data\") pod \"d7c9c017-a9df-4899-81ec-1fc7181f2414\" (UID: \"d7c9c017-a9df-4899-81ec-1fc7181f2414\") " Oct 03 13:54:10 crc kubenswrapper[4861]: I1003 13:54:10.467436 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d7c9c017-a9df-4899-81ec-1fc7181f2414-scripts" (OuterVolumeSpecName: "scripts") pod "d7c9c017-a9df-4899-81ec-1fc7181f2414" (UID: "d7c9c017-a9df-4899-81ec-1fc7181f2414"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:54:10 crc kubenswrapper[4861]: I1003 13:54:10.477700 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d7c9c017-a9df-4899-81ec-1fc7181f2414-kube-api-access-cvcc7" (OuterVolumeSpecName: "kube-api-access-cvcc7") pod "d7c9c017-a9df-4899-81ec-1fc7181f2414" (UID: "d7c9c017-a9df-4899-81ec-1fc7181f2414"). InnerVolumeSpecName "kube-api-access-cvcc7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:54:10 crc kubenswrapper[4861]: I1003 13:54:10.507437 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d7c9c017-a9df-4899-81ec-1fc7181f2414-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d7c9c017-a9df-4899-81ec-1fc7181f2414" (UID: "d7c9c017-a9df-4899-81ec-1fc7181f2414"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:54:10 crc kubenswrapper[4861]: I1003 13:54:10.515314 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d7c9c017-a9df-4899-81ec-1fc7181f2414-config-data" (OuterVolumeSpecName: "config-data") pod "d7c9c017-a9df-4899-81ec-1fc7181f2414" (UID: "d7c9c017-a9df-4899-81ec-1fc7181f2414"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:54:10 crc kubenswrapper[4861]: I1003 13:54:10.558633 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cvcc7\" (UniqueName: \"kubernetes.io/projected/d7c9c017-a9df-4899-81ec-1fc7181f2414-kube-api-access-cvcc7\") on node \"crc\" DevicePath \"\"" Oct 03 13:54:10 crc kubenswrapper[4861]: I1003 13:54:10.558672 4861 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d7c9c017-a9df-4899-81ec-1fc7181f2414-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 13:54:10 crc kubenswrapper[4861]: I1003 13:54:10.558685 4861 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7c9c017-a9df-4899-81ec-1fc7181f2414-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 13:54:10 crc kubenswrapper[4861]: I1003 13:54:10.558698 4861 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d7c9c017-a9df-4899-81ec-1fc7181f2414-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 13:54:10 crc kubenswrapper[4861]: I1003 13:54:10.904682 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-867t8" event={"ID":"d7c9c017-a9df-4899-81ec-1fc7181f2414","Type":"ContainerDied","Data":"5726b96558232a57e8a11171e2f092337ced9658f866b56e5992bdbc79f4a4a0"} Oct 03 13:54:10 crc kubenswrapper[4861]: I1003 13:54:10.904717 4861 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5726b96558232a57e8a11171e2f092337ced9658f866b56e5992bdbc79f4a4a0" Oct 03 13:54:10 crc kubenswrapper[4861]: I1003 13:54:10.904938 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-867t8" Oct 03 13:54:11 crc kubenswrapper[4861]: I1003 13:54:11.194020 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 03 13:54:11 crc kubenswrapper[4861]: I1003 13:54:11.194624 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="baaeffdf-d905-4d99-905e-00944c02237b" containerName="nova-api-log" containerID="cri-o://4ac1345636d9916e8f3a75f8f6f3fdd485ec93b3cb12dfdafb0f71e8b61b07ec" gracePeriod=30 Oct 03 13:54:11 crc kubenswrapper[4861]: I1003 13:54:11.194681 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="baaeffdf-d905-4d99-905e-00944c02237b" containerName="nova-api-api" containerID="cri-o://9f968a5aff9f9669f2fb38b98aa20015ff77140eded634f8ae69366ebce6cb8f" gracePeriod=30 Oct 03 13:54:11 crc kubenswrapper[4861]: I1003 13:54:11.206382 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Oct 03 13:54:11 crc kubenswrapper[4861]: I1003 13:54:11.206631 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="fabf66cd-aed6-4943-8ee6-6546c572cb91" containerName="nova-scheduler-scheduler" containerID="cri-o://4c6c6994c999addc3fc56bea7791eb4c1521e12cc2024fc440a3fd1ea808a37a" gracePeriod=30 Oct 03 13:54:11 crc kubenswrapper[4861]: I1003 13:54:11.266802 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 03 13:54:11 crc kubenswrapper[4861]: I1003 13:54:11.267087 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="02a413a1-1260-4c95-8513-c3266a085870" containerName="nova-metadata-log" containerID="cri-o://1afaf57b0d18a18001b189539a8b0bc580d485fcc64ab94c9106b6b3a4b1ea3a" gracePeriod=30 Oct 03 13:54:11 crc kubenswrapper[4861]: I1003 13:54:11.267301 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="02a413a1-1260-4c95-8513-c3266a085870" containerName="nova-metadata-metadata" containerID="cri-o://cc1767d9373e1d3602b0168c08d6e38c68f068e1b18ef113855bd8a99e47cfc8" gracePeriod=30 Oct 03 13:54:11 crc kubenswrapper[4861]: I1003 13:54:11.915366 4861 generic.go:334] "Generic (PLEG): container finished" podID="baaeffdf-d905-4d99-905e-00944c02237b" containerID="4ac1345636d9916e8f3a75f8f6f3fdd485ec93b3cb12dfdafb0f71e8b61b07ec" exitCode=143 Oct 03 13:54:11 crc kubenswrapper[4861]: I1003 13:54:11.915452 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"baaeffdf-d905-4d99-905e-00944c02237b","Type":"ContainerDied","Data":"4ac1345636d9916e8f3a75f8f6f3fdd485ec93b3cb12dfdafb0f71e8b61b07ec"} Oct 03 13:54:11 crc kubenswrapper[4861]: I1003 13:54:11.917257 4861 generic.go:334] "Generic (PLEG): container finished" podID="02a413a1-1260-4c95-8513-c3266a085870" containerID="1afaf57b0d18a18001b189539a8b0bc580d485fcc64ab94c9106b6b3a4b1ea3a" exitCode=143 Oct 03 13:54:11 crc kubenswrapper[4861]: I1003 13:54:11.917291 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"02a413a1-1260-4c95-8513-c3266a085870","Type":"ContainerDied","Data":"1afaf57b0d18a18001b189539a8b0bc580d485fcc64ab94c9106b6b3a4b1ea3a"} Oct 03 13:54:14 crc kubenswrapper[4861]: E1003 13:54:14.697691 4861 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container 
is not created or running: checking if PID of 4c6c6994c999addc3fc56bea7791eb4c1521e12cc2024fc440a3fd1ea808a37a is running failed: container process not found" containerID="4c6c6994c999addc3fc56bea7791eb4c1521e12cc2024fc440a3fd1ea808a37a" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Oct 03 13:54:14 crc kubenswrapper[4861]: E1003 13:54:14.698678 4861 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 4c6c6994c999addc3fc56bea7791eb4c1521e12cc2024fc440a3fd1ea808a37a is running failed: container process not found" containerID="4c6c6994c999addc3fc56bea7791eb4c1521e12cc2024fc440a3fd1ea808a37a" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Oct 03 13:54:14 crc kubenswrapper[4861]: E1003 13:54:14.698943 4861 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 4c6c6994c999addc3fc56bea7791eb4c1521e12cc2024fc440a3fd1ea808a37a is running failed: container process not found" containerID="4c6c6994c999addc3fc56bea7791eb4c1521e12cc2024fc440a3fd1ea808a37a" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Oct 03 13:54:14 crc kubenswrapper[4861]: E1003 13:54:14.698991 4861 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 4c6c6994c999addc3fc56bea7791eb4c1521e12cc2024fc440a3fd1ea808a37a is running failed: container process not found" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="fabf66cd-aed6-4943-8ee6-6546c572cb91" containerName="nova-scheduler-scheduler" Oct 03 13:54:14 crc kubenswrapper[4861]: I1003 13:54:14.732080 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 03 13:54:14 crc kubenswrapper[4861]: I1003 13:54:14.838978 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lhfdc\" (UniqueName: \"kubernetes.io/projected/fabf66cd-aed6-4943-8ee6-6546c572cb91-kube-api-access-lhfdc\") pod \"fabf66cd-aed6-4943-8ee6-6546c572cb91\" (UID: \"fabf66cd-aed6-4943-8ee6-6546c572cb91\") " Oct 03 13:54:14 crc kubenswrapper[4861]: I1003 13:54:14.839051 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fabf66cd-aed6-4943-8ee6-6546c572cb91-config-data\") pod \"fabf66cd-aed6-4943-8ee6-6546c572cb91\" (UID: \"fabf66cd-aed6-4943-8ee6-6546c572cb91\") " Oct 03 13:54:14 crc kubenswrapper[4861]: I1003 13:54:14.839098 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fabf66cd-aed6-4943-8ee6-6546c572cb91-combined-ca-bundle\") pod \"fabf66cd-aed6-4943-8ee6-6546c572cb91\" (UID: \"fabf66cd-aed6-4943-8ee6-6546c572cb91\") " Oct 03 13:54:14 crc kubenswrapper[4861]: I1003 13:54:14.845545 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fabf66cd-aed6-4943-8ee6-6546c572cb91-kube-api-access-lhfdc" (OuterVolumeSpecName: "kube-api-access-lhfdc") pod "fabf66cd-aed6-4943-8ee6-6546c572cb91" (UID: "fabf66cd-aed6-4943-8ee6-6546c572cb91"). InnerVolumeSpecName "kube-api-access-lhfdc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:54:14 crc kubenswrapper[4861]: I1003 13:54:14.878303 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fabf66cd-aed6-4943-8ee6-6546c572cb91-config-data" (OuterVolumeSpecName: "config-data") pod "fabf66cd-aed6-4943-8ee6-6546c572cb91" (UID: "fabf66cd-aed6-4943-8ee6-6546c572cb91"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:54:14 crc kubenswrapper[4861]: I1003 13:54:14.879333 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fabf66cd-aed6-4943-8ee6-6546c572cb91-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fabf66cd-aed6-4943-8ee6-6546c572cb91" (UID: "fabf66cd-aed6-4943-8ee6-6546c572cb91"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:54:14 crc kubenswrapper[4861]: I1003 13:54:14.908013 4861 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="02a413a1-1260-4c95-8513-c3266a085870" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.191:8775/\": read tcp 10.217.0.2:33164->10.217.0.191:8775: read: connection reset by peer" Oct 03 13:54:14 crc kubenswrapper[4861]: I1003 13:54:14.908411 4861 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="02a413a1-1260-4c95-8513-c3266a085870" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.191:8775/\": read tcp 10.217.0.2:33162->10.217.0.191:8775: read: connection reset by peer" Oct 03 13:54:14 crc kubenswrapper[4861]: I1003 13:54:14.942710 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lhfdc\" (UniqueName: \"kubernetes.io/projected/fabf66cd-aed6-4943-8ee6-6546c572cb91-kube-api-access-lhfdc\") on node \"crc\" DevicePath \"\"" Oct 03 13:54:14 crc kubenswrapper[4861]: I1003 13:54:14.942735 4861 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fabf66cd-aed6-4943-8ee6-6546c572cb91-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 13:54:14 crc kubenswrapper[4861]: I1003 13:54:14.942745 4861 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fabf66cd-aed6-4943-8ee6-6546c572cb91-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 13:54:14 crc kubenswrapper[4861]: I1003 13:54:14.944957 4861 generic.go:334] "Generic (PLEG): container finished" podID="fabf66cd-aed6-4943-8ee6-6546c572cb91" containerID="4c6c6994c999addc3fc56bea7791eb4c1521e12cc2024fc440a3fd1ea808a37a" exitCode=0 Oct 03 13:54:14 crc kubenswrapper[4861]: I1003 13:54:14.944990 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"fabf66cd-aed6-4943-8ee6-6546c572cb91","Type":"ContainerDied","Data":"4c6c6994c999addc3fc56bea7791eb4c1521e12cc2024fc440a3fd1ea808a37a"} Oct 03 13:54:14 crc kubenswrapper[4861]: I1003 13:54:14.945012 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"fabf66cd-aed6-4943-8ee6-6546c572cb91","Type":"ContainerDied","Data":"038a86d8d1582d33f526ff90330ce2a172705538bb354dc1281bd1a622cfd5b3"} Oct 03 13:54:14 crc kubenswrapper[4861]: I1003 13:54:14.945029 4861 scope.go:117] "RemoveContainer" containerID="4c6c6994c999addc3fc56bea7791eb4c1521e12cc2024fc440a3fd1ea808a37a" Oct 03 13:54:14 
crc kubenswrapper[4861]: I1003 13:54:14.945134 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 03 13:54:15 crc kubenswrapper[4861]: I1003 13:54:15.014654 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Oct 03 13:54:15 crc kubenswrapper[4861]: I1003 13:54:15.020587 4861 scope.go:117] "RemoveContainer" containerID="4c6c6994c999addc3fc56bea7791eb4c1521e12cc2024fc440a3fd1ea808a37a" Oct 03 13:54:15 crc kubenswrapper[4861]: E1003 13:54:15.022935 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4c6c6994c999addc3fc56bea7791eb4c1521e12cc2024fc440a3fd1ea808a37a\": container with ID starting with 4c6c6994c999addc3fc56bea7791eb4c1521e12cc2024fc440a3fd1ea808a37a not found: ID does not exist" containerID="4c6c6994c999addc3fc56bea7791eb4c1521e12cc2024fc440a3fd1ea808a37a" Oct 03 13:54:15 crc kubenswrapper[4861]: I1003 13:54:15.022983 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4c6c6994c999addc3fc56bea7791eb4c1521e12cc2024fc440a3fd1ea808a37a"} err="failed to get container status \"4c6c6994c999addc3fc56bea7791eb4c1521e12cc2024fc440a3fd1ea808a37a\": rpc error: code = NotFound desc = could not find container \"4c6c6994c999addc3fc56bea7791eb4c1521e12cc2024fc440a3fd1ea808a37a\": container with ID starting with 4c6c6994c999addc3fc56bea7791eb4c1521e12cc2024fc440a3fd1ea808a37a not found: ID does not exist" Oct 03 13:54:15 crc kubenswrapper[4861]: I1003 13:54:15.027886 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Oct 03 13:54:15 crc kubenswrapper[4861]: I1003 13:54:15.043648 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Oct 03 13:54:15 crc kubenswrapper[4861]: E1003 13:54:15.044069 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fabf66cd-aed6-4943-8ee6-6546c572cb91" containerName="nova-scheduler-scheduler" Oct 03 13:54:15 crc kubenswrapper[4861]: I1003 13:54:15.044082 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="fabf66cd-aed6-4943-8ee6-6546c572cb91" containerName="nova-scheduler-scheduler" Oct 03 13:54:15 crc kubenswrapper[4861]: E1003 13:54:15.044101 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db0287ff-e926-428b-a4e0-4dd1e3b40b66" containerName="init" Oct 03 13:54:15 crc kubenswrapper[4861]: I1003 13:54:15.044106 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="db0287ff-e926-428b-a4e0-4dd1e3b40b66" containerName="init" Oct 03 13:54:15 crc kubenswrapper[4861]: E1003 13:54:15.044130 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d7c9c017-a9df-4899-81ec-1fc7181f2414" containerName="nova-manage" Oct 03 13:54:15 crc kubenswrapper[4861]: I1003 13:54:15.044138 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="d7c9c017-a9df-4899-81ec-1fc7181f2414" containerName="nova-manage" Oct 03 13:54:15 crc kubenswrapper[4861]: E1003 13:54:15.044153 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db0287ff-e926-428b-a4e0-4dd1e3b40b66" containerName="dnsmasq-dns" Oct 03 13:54:15 crc kubenswrapper[4861]: I1003 13:54:15.044158 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="db0287ff-e926-428b-a4e0-4dd1e3b40b66" containerName="dnsmasq-dns" Oct 03 13:54:15 crc kubenswrapper[4861]: I1003 13:54:15.044384 4861 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="fabf66cd-aed6-4943-8ee6-6546c572cb91" containerName="nova-scheduler-scheduler" Oct 03 13:54:15 crc kubenswrapper[4861]: I1003 13:54:15.044404 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="db0287ff-e926-428b-a4e0-4dd1e3b40b66" containerName="dnsmasq-dns" Oct 03 13:54:15 crc kubenswrapper[4861]: I1003 13:54:15.044420 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="d7c9c017-a9df-4899-81ec-1fc7181f2414" containerName="nova-manage" Oct 03 13:54:15 crc kubenswrapper[4861]: I1003 13:54:15.045808 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 03 13:54:15 crc kubenswrapper[4861]: I1003 13:54:15.052372 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Oct 03 13:54:15 crc kubenswrapper[4861]: I1003 13:54:15.078870 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 03 13:54:15 crc kubenswrapper[4861]: I1003 13:54:15.146521 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c364afdb-ff79-4011-abeb-243d45ea7b95-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"c364afdb-ff79-4011-abeb-243d45ea7b95\") " pod="openstack/nova-scheduler-0" Oct 03 13:54:15 crc kubenswrapper[4861]: I1003 13:54:15.146879 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2868f\" (UniqueName: \"kubernetes.io/projected/c364afdb-ff79-4011-abeb-243d45ea7b95-kube-api-access-2868f\") pod \"nova-scheduler-0\" (UID: \"c364afdb-ff79-4011-abeb-243d45ea7b95\") " pod="openstack/nova-scheduler-0" Oct 03 13:54:15 crc kubenswrapper[4861]: I1003 13:54:15.146970 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c364afdb-ff79-4011-abeb-243d45ea7b95-config-data\") pod \"nova-scheduler-0\" (UID: \"c364afdb-ff79-4011-abeb-243d45ea7b95\") " pod="openstack/nova-scheduler-0" Oct 03 13:54:15 crc kubenswrapper[4861]: I1003 13:54:15.250288 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2868f\" (UniqueName: \"kubernetes.io/projected/c364afdb-ff79-4011-abeb-243d45ea7b95-kube-api-access-2868f\") pod \"nova-scheduler-0\" (UID: \"c364afdb-ff79-4011-abeb-243d45ea7b95\") " pod="openstack/nova-scheduler-0" Oct 03 13:54:15 crc kubenswrapper[4861]: I1003 13:54:15.250331 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c364afdb-ff79-4011-abeb-243d45ea7b95-config-data\") pod \"nova-scheduler-0\" (UID: \"c364afdb-ff79-4011-abeb-243d45ea7b95\") " pod="openstack/nova-scheduler-0" Oct 03 13:54:15 crc kubenswrapper[4861]: I1003 13:54:15.250381 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c364afdb-ff79-4011-abeb-243d45ea7b95-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"c364afdb-ff79-4011-abeb-243d45ea7b95\") " pod="openstack/nova-scheduler-0" Oct 03 13:54:15 crc kubenswrapper[4861]: I1003 13:54:15.262653 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c364afdb-ff79-4011-abeb-243d45ea7b95-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: 
\"c364afdb-ff79-4011-abeb-243d45ea7b95\") " pod="openstack/nova-scheduler-0" Oct 03 13:54:15 crc kubenswrapper[4861]: I1003 13:54:15.273987 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c364afdb-ff79-4011-abeb-243d45ea7b95-config-data\") pod \"nova-scheduler-0\" (UID: \"c364afdb-ff79-4011-abeb-243d45ea7b95\") " pod="openstack/nova-scheduler-0" Oct 03 13:54:15 crc kubenswrapper[4861]: I1003 13:54:15.279430 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2868f\" (UniqueName: \"kubernetes.io/projected/c364afdb-ff79-4011-abeb-243d45ea7b95-kube-api-access-2868f\") pod \"nova-scheduler-0\" (UID: \"c364afdb-ff79-4011-abeb-243d45ea7b95\") " pod="openstack/nova-scheduler-0" Oct 03 13:54:15 crc kubenswrapper[4861]: I1003 13:54:15.365381 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 03 13:54:15 crc kubenswrapper[4861]: I1003 13:54:15.553066 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 03 13:54:15 crc kubenswrapper[4861]: I1003 13:54:15.663877 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wzdhl\" (UniqueName: \"kubernetes.io/projected/02a413a1-1260-4c95-8513-c3266a085870-kube-api-access-wzdhl\") pod \"02a413a1-1260-4c95-8513-c3266a085870\" (UID: \"02a413a1-1260-4c95-8513-c3266a085870\") " Oct 03 13:54:15 crc kubenswrapper[4861]: I1003 13:54:15.664167 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02a413a1-1260-4c95-8513-c3266a085870-combined-ca-bundle\") pod \"02a413a1-1260-4c95-8513-c3266a085870\" (UID: \"02a413a1-1260-4c95-8513-c3266a085870\") " Oct 03 13:54:15 crc kubenswrapper[4861]: I1003 13:54:15.664221 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/02a413a1-1260-4c95-8513-c3266a085870-nova-metadata-tls-certs\") pod \"02a413a1-1260-4c95-8513-c3266a085870\" (UID: \"02a413a1-1260-4c95-8513-c3266a085870\") " Oct 03 13:54:15 crc kubenswrapper[4861]: I1003 13:54:15.664292 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/02a413a1-1260-4c95-8513-c3266a085870-logs\") pod \"02a413a1-1260-4c95-8513-c3266a085870\" (UID: \"02a413a1-1260-4c95-8513-c3266a085870\") " Oct 03 13:54:15 crc kubenswrapper[4861]: I1003 13:54:15.664406 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/02a413a1-1260-4c95-8513-c3266a085870-config-data\") pod \"02a413a1-1260-4c95-8513-c3266a085870\" (UID: \"02a413a1-1260-4c95-8513-c3266a085870\") " Oct 03 13:54:15 crc kubenswrapper[4861]: I1003 13:54:15.666392 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/02a413a1-1260-4c95-8513-c3266a085870-logs" (OuterVolumeSpecName: "logs") pod "02a413a1-1260-4c95-8513-c3266a085870" (UID: "02a413a1-1260-4c95-8513-c3266a085870"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:54:15 crc kubenswrapper[4861]: I1003 13:54:15.677510 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/02a413a1-1260-4c95-8513-c3266a085870-kube-api-access-wzdhl" (OuterVolumeSpecName: "kube-api-access-wzdhl") pod "02a413a1-1260-4c95-8513-c3266a085870" (UID: "02a413a1-1260-4c95-8513-c3266a085870"). InnerVolumeSpecName "kube-api-access-wzdhl". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:54:15 crc kubenswrapper[4861]: I1003 13:54:15.706035 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/02a413a1-1260-4c95-8513-c3266a085870-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "02a413a1-1260-4c95-8513-c3266a085870" (UID: "02a413a1-1260-4c95-8513-c3266a085870"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:54:15 crc kubenswrapper[4861]: I1003 13:54:15.715275 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/02a413a1-1260-4c95-8513-c3266a085870-config-data" (OuterVolumeSpecName: "config-data") pod "02a413a1-1260-4c95-8513-c3266a085870" (UID: "02a413a1-1260-4c95-8513-c3266a085870"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:54:15 crc kubenswrapper[4861]: I1003 13:54:15.765918 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wzdhl\" (UniqueName: \"kubernetes.io/projected/02a413a1-1260-4c95-8513-c3266a085870-kube-api-access-wzdhl\") on node \"crc\" DevicePath \"\"" Oct 03 13:54:15 crc kubenswrapper[4861]: I1003 13:54:15.765949 4861 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02a413a1-1260-4c95-8513-c3266a085870-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 13:54:15 crc kubenswrapper[4861]: I1003 13:54:15.765957 4861 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/02a413a1-1260-4c95-8513-c3266a085870-logs\") on node \"crc\" DevicePath \"\"" Oct 03 13:54:15 crc kubenswrapper[4861]: I1003 13:54:15.765966 4861 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/02a413a1-1260-4c95-8513-c3266a085870-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 13:54:15 crc kubenswrapper[4861]: I1003 13:54:15.774786 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/02a413a1-1260-4c95-8513-c3266a085870-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "02a413a1-1260-4c95-8513-c3266a085870" (UID: "02a413a1-1260-4c95-8513-c3266a085870"). InnerVolumeSpecName "nova-metadata-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:54:15 crc kubenswrapper[4861]: I1003 13:54:15.866240 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 03 13:54:15 crc kubenswrapper[4861]: I1003 13:54:15.872351 4861 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/02a413a1-1260-4c95-8513-c3266a085870-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 03 13:54:15 crc kubenswrapper[4861]: I1003 13:54:15.957278 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"c364afdb-ff79-4011-abeb-243d45ea7b95","Type":"ContainerStarted","Data":"f6ebefb169b1177803a86de0cc5df0f8214c950dcc565bc5bcd1a322af31130d"} Oct 03 13:54:15 crc kubenswrapper[4861]: I1003 13:54:15.964028 4861 generic.go:334] "Generic (PLEG): container finished" podID="02a413a1-1260-4c95-8513-c3266a085870" containerID="cc1767d9373e1d3602b0168c08d6e38c68f068e1b18ef113855bd8a99e47cfc8" exitCode=0 Oct 03 13:54:15 crc kubenswrapper[4861]: I1003 13:54:15.964070 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"02a413a1-1260-4c95-8513-c3266a085870","Type":"ContainerDied","Data":"cc1767d9373e1d3602b0168c08d6e38c68f068e1b18ef113855bd8a99e47cfc8"} Oct 03 13:54:15 crc kubenswrapper[4861]: I1003 13:54:15.964095 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"02a413a1-1260-4c95-8513-c3266a085870","Type":"ContainerDied","Data":"b12e040fa4850cc9492ec36cda3ad8a4ddf1752b8b941eaa74cb8662b866944d"} Oct 03 13:54:15 crc kubenswrapper[4861]: I1003 13:54:15.964111 4861 scope.go:117] "RemoveContainer" containerID="cc1767d9373e1d3602b0168c08d6e38c68f068e1b18ef113855bd8a99e47cfc8" Oct 03 13:54:15 crc kubenswrapper[4861]: I1003 13:54:15.964292 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Oct 03 13:54:16 crc kubenswrapper[4861]: I1003 13:54:16.014424 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 03 13:54:16 crc kubenswrapper[4861]: I1003 13:54:16.024745 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Oct 03 13:54:16 crc kubenswrapper[4861]: I1003 13:54:16.036799 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Oct 03 13:54:16 crc kubenswrapper[4861]: E1003 13:54:16.037372 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="02a413a1-1260-4c95-8513-c3266a085870" containerName="nova-metadata-log" Oct 03 13:54:16 crc kubenswrapper[4861]: I1003 13:54:16.037414 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="02a413a1-1260-4c95-8513-c3266a085870" containerName="nova-metadata-log" Oct 03 13:54:16 crc kubenswrapper[4861]: E1003 13:54:16.037444 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="02a413a1-1260-4c95-8513-c3266a085870" containerName="nova-metadata-metadata" Oct 03 13:54:16 crc kubenswrapper[4861]: I1003 13:54:16.037453 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="02a413a1-1260-4c95-8513-c3266a085870" containerName="nova-metadata-metadata" Oct 03 13:54:16 crc kubenswrapper[4861]: I1003 13:54:16.037717 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="02a413a1-1260-4c95-8513-c3266a085870" containerName="nova-metadata-metadata" Oct 03 13:54:16 crc kubenswrapper[4861]: I1003 13:54:16.037744 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="02a413a1-1260-4c95-8513-c3266a085870" containerName="nova-metadata-log" Oct 03 13:54:16 crc kubenswrapper[4861]: I1003 13:54:16.039147 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Oct 03 13:54:16 crc kubenswrapper[4861]: I1003 13:54:16.042020 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Oct 03 13:54:16 crc kubenswrapper[4861]: I1003 13:54:16.042700 4861 scope.go:117] "RemoveContainer" containerID="1afaf57b0d18a18001b189539a8b0bc580d485fcc64ab94c9106b6b3a4b1ea3a" Oct 03 13:54:16 crc kubenswrapper[4861]: I1003 13:54:16.046063 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Oct 03 13:54:16 crc kubenswrapper[4861]: I1003 13:54:16.060090 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 03 13:54:16 crc kubenswrapper[4861]: I1003 13:54:16.085835 4861 scope.go:117] "RemoveContainer" containerID="cc1767d9373e1d3602b0168c08d6e38c68f068e1b18ef113855bd8a99e47cfc8" Oct 03 13:54:16 crc kubenswrapper[4861]: E1003 13:54:16.086601 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cc1767d9373e1d3602b0168c08d6e38c68f068e1b18ef113855bd8a99e47cfc8\": container with ID starting with cc1767d9373e1d3602b0168c08d6e38c68f068e1b18ef113855bd8a99e47cfc8 not found: ID does not exist" containerID="cc1767d9373e1d3602b0168c08d6e38c68f068e1b18ef113855bd8a99e47cfc8" Oct 03 13:54:16 crc kubenswrapper[4861]: I1003 13:54:16.086634 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cc1767d9373e1d3602b0168c08d6e38c68f068e1b18ef113855bd8a99e47cfc8"} err="failed to get container status \"cc1767d9373e1d3602b0168c08d6e38c68f068e1b18ef113855bd8a99e47cfc8\": rpc error: code = NotFound desc = could not find container \"cc1767d9373e1d3602b0168c08d6e38c68f068e1b18ef113855bd8a99e47cfc8\": container with ID starting with cc1767d9373e1d3602b0168c08d6e38c68f068e1b18ef113855bd8a99e47cfc8 not found: ID does not exist" Oct 03 13:54:16 crc kubenswrapper[4861]: I1003 13:54:16.086653 4861 scope.go:117] "RemoveContainer" containerID="1afaf57b0d18a18001b189539a8b0bc580d485fcc64ab94c9106b6b3a4b1ea3a" Oct 03 13:54:16 crc kubenswrapper[4861]: E1003 13:54:16.086862 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1afaf57b0d18a18001b189539a8b0bc580d485fcc64ab94c9106b6b3a4b1ea3a\": container with ID starting with 1afaf57b0d18a18001b189539a8b0bc580d485fcc64ab94c9106b6b3a4b1ea3a not found: ID does not exist" containerID="1afaf57b0d18a18001b189539a8b0bc580d485fcc64ab94c9106b6b3a4b1ea3a" Oct 03 13:54:16 crc kubenswrapper[4861]: I1003 13:54:16.086883 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1afaf57b0d18a18001b189539a8b0bc580d485fcc64ab94c9106b6b3a4b1ea3a"} err="failed to get container status \"1afaf57b0d18a18001b189539a8b0bc580d485fcc64ab94c9106b6b3a4b1ea3a\": rpc error: code = NotFound desc = could not find container \"1afaf57b0d18a18001b189539a8b0bc580d485fcc64ab94c9106b6b3a4b1ea3a\": container with ID starting with 1afaf57b0d18a18001b189539a8b0bc580d485fcc64ab94c9106b6b3a4b1ea3a not found: ID does not exist" Oct 03 13:54:16 crc kubenswrapper[4861]: I1003 13:54:16.178756 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/92433e8c-4d6d-4a9d-a492-192863eed46c-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: 
\"92433e8c-4d6d-4a9d-a492-192863eed46c\") " pod="openstack/nova-metadata-0" Oct 03 13:54:16 crc kubenswrapper[4861]: I1003 13:54:16.178866 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/92433e8c-4d6d-4a9d-a492-192863eed46c-config-data\") pod \"nova-metadata-0\" (UID: \"92433e8c-4d6d-4a9d-a492-192863eed46c\") " pod="openstack/nova-metadata-0" Oct 03 13:54:16 crc kubenswrapper[4861]: I1003 13:54:16.179026 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/92433e8c-4d6d-4a9d-a492-192863eed46c-logs\") pod \"nova-metadata-0\" (UID: \"92433e8c-4d6d-4a9d-a492-192863eed46c\") " pod="openstack/nova-metadata-0" Oct 03 13:54:16 crc kubenswrapper[4861]: I1003 13:54:16.179063 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4wckz\" (UniqueName: \"kubernetes.io/projected/92433e8c-4d6d-4a9d-a492-192863eed46c-kube-api-access-4wckz\") pod \"nova-metadata-0\" (UID: \"92433e8c-4d6d-4a9d-a492-192863eed46c\") " pod="openstack/nova-metadata-0" Oct 03 13:54:16 crc kubenswrapper[4861]: I1003 13:54:16.179094 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/92433e8c-4d6d-4a9d-a492-192863eed46c-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"92433e8c-4d6d-4a9d-a492-192863eed46c\") " pod="openstack/nova-metadata-0" Oct 03 13:54:16 crc kubenswrapper[4861]: I1003 13:54:16.280657 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/92433e8c-4d6d-4a9d-a492-192863eed46c-logs\") pod \"nova-metadata-0\" (UID: \"92433e8c-4d6d-4a9d-a492-192863eed46c\") " pod="openstack/nova-metadata-0" Oct 03 13:54:16 crc kubenswrapper[4861]: I1003 13:54:16.280717 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4wckz\" (UniqueName: \"kubernetes.io/projected/92433e8c-4d6d-4a9d-a492-192863eed46c-kube-api-access-4wckz\") pod \"nova-metadata-0\" (UID: \"92433e8c-4d6d-4a9d-a492-192863eed46c\") " pod="openstack/nova-metadata-0" Oct 03 13:54:16 crc kubenswrapper[4861]: I1003 13:54:16.280742 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/92433e8c-4d6d-4a9d-a492-192863eed46c-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"92433e8c-4d6d-4a9d-a492-192863eed46c\") " pod="openstack/nova-metadata-0" Oct 03 13:54:16 crc kubenswrapper[4861]: I1003 13:54:16.280783 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/92433e8c-4d6d-4a9d-a492-192863eed46c-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"92433e8c-4d6d-4a9d-a492-192863eed46c\") " pod="openstack/nova-metadata-0" Oct 03 13:54:16 crc kubenswrapper[4861]: I1003 13:54:16.280843 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/92433e8c-4d6d-4a9d-a492-192863eed46c-config-data\") pod \"nova-metadata-0\" (UID: \"92433e8c-4d6d-4a9d-a492-192863eed46c\") " pod="openstack/nova-metadata-0" Oct 03 13:54:16 crc kubenswrapper[4861]: I1003 13:54:16.281706 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/92433e8c-4d6d-4a9d-a492-192863eed46c-logs\") pod \"nova-metadata-0\" (UID: \"92433e8c-4d6d-4a9d-a492-192863eed46c\") " pod="openstack/nova-metadata-0" Oct 03 13:54:16 crc kubenswrapper[4861]: I1003 13:54:16.286751 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/92433e8c-4d6d-4a9d-a492-192863eed46c-config-data\") pod \"nova-metadata-0\" (UID: \"92433e8c-4d6d-4a9d-a492-192863eed46c\") " pod="openstack/nova-metadata-0" Oct 03 13:54:16 crc kubenswrapper[4861]: I1003 13:54:16.291872 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/92433e8c-4d6d-4a9d-a492-192863eed46c-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"92433e8c-4d6d-4a9d-a492-192863eed46c\") " pod="openstack/nova-metadata-0" Oct 03 13:54:16 crc kubenswrapper[4861]: I1003 13:54:16.292349 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/92433e8c-4d6d-4a9d-a492-192863eed46c-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"92433e8c-4d6d-4a9d-a492-192863eed46c\") " pod="openstack/nova-metadata-0" Oct 03 13:54:16 crc kubenswrapper[4861]: I1003 13:54:16.297040 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4wckz\" (UniqueName: \"kubernetes.io/projected/92433e8c-4d6d-4a9d-a492-192863eed46c-kube-api-access-4wckz\") pod \"nova-metadata-0\" (UID: \"92433e8c-4d6d-4a9d-a492-192863eed46c\") " pod="openstack/nova-metadata-0" Oct 03 13:54:16 crc kubenswrapper[4861]: I1003 13:54:16.358758 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 03 13:54:16 crc kubenswrapper[4861]: I1003 13:54:16.692773 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="02a413a1-1260-4c95-8513-c3266a085870" path="/var/lib/kubelet/pods/02a413a1-1260-4c95-8513-c3266a085870/volumes" Oct 03 13:54:16 crc kubenswrapper[4861]: I1003 13:54:16.694082 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fabf66cd-aed6-4943-8ee6-6546c572cb91" path="/var/lib/kubelet/pods/fabf66cd-aed6-4943-8ee6-6546c572cb91/volumes" Oct 03 13:54:16 crc kubenswrapper[4861]: I1003 13:54:16.881141 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 03 13:54:16 crc kubenswrapper[4861]: I1003 13:54:16.980449 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"92433e8c-4d6d-4a9d-a492-192863eed46c","Type":"ContainerStarted","Data":"0bfd94c5315aa1048c712e8715cf5288ccc95cfa8c5efb206cfa6471f7329f76"} Oct 03 13:54:16 crc kubenswrapper[4861]: I1003 13:54:16.982322 4861 generic.go:334] "Generic (PLEG): container finished" podID="baaeffdf-d905-4d99-905e-00944c02237b" containerID="9f968a5aff9f9669f2fb38b98aa20015ff77140eded634f8ae69366ebce6cb8f" exitCode=0 Oct 03 13:54:16 crc kubenswrapper[4861]: I1003 13:54:16.982380 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"baaeffdf-d905-4d99-905e-00944c02237b","Type":"ContainerDied","Data":"9f968a5aff9f9669f2fb38b98aa20015ff77140eded634f8ae69366ebce6cb8f"} Oct 03 13:54:16 crc kubenswrapper[4861]: I1003 13:54:16.984168 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" 
event={"ID":"c364afdb-ff79-4011-abeb-243d45ea7b95","Type":"ContainerStarted","Data":"1bccf52d4474b678620d1a99b5a4a42665b141a8d7765feb08a704213d2a0dad"} Oct 03 13:54:16 crc kubenswrapper[4861]: I1003 13:54:16.996545 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 03 13:54:17 crc kubenswrapper[4861]: I1003 13:54:17.015127 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.015103765 podStartE2EDuration="2.015103765s" podCreationTimestamp="2025-10-03 13:54:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:54:17.000845838 +0000 UTC m=+1370.998830895" watchObservedRunningTime="2025-10-03 13:54:17.015103765 +0000 UTC m=+1371.013088812" Oct 03 13:54:17 crc kubenswrapper[4861]: I1003 13:54:17.097528 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gh7qv\" (UniqueName: \"kubernetes.io/projected/baaeffdf-d905-4d99-905e-00944c02237b-kube-api-access-gh7qv\") pod \"baaeffdf-d905-4d99-905e-00944c02237b\" (UID: \"baaeffdf-d905-4d99-905e-00944c02237b\") " Oct 03 13:54:17 crc kubenswrapper[4861]: I1003 13:54:17.097589 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/baaeffdf-d905-4d99-905e-00944c02237b-combined-ca-bundle\") pod \"baaeffdf-d905-4d99-905e-00944c02237b\" (UID: \"baaeffdf-d905-4d99-905e-00944c02237b\") " Oct 03 13:54:17 crc kubenswrapper[4861]: I1003 13:54:17.097672 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/baaeffdf-d905-4d99-905e-00944c02237b-internal-tls-certs\") pod \"baaeffdf-d905-4d99-905e-00944c02237b\" (UID: \"baaeffdf-d905-4d99-905e-00944c02237b\") " Oct 03 13:54:17 crc kubenswrapper[4861]: I1003 13:54:17.097707 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/baaeffdf-d905-4d99-905e-00944c02237b-public-tls-certs\") pod \"baaeffdf-d905-4d99-905e-00944c02237b\" (UID: \"baaeffdf-d905-4d99-905e-00944c02237b\") " Oct 03 13:54:17 crc kubenswrapper[4861]: I1003 13:54:17.097722 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/baaeffdf-d905-4d99-905e-00944c02237b-config-data\") pod \"baaeffdf-d905-4d99-905e-00944c02237b\" (UID: \"baaeffdf-d905-4d99-905e-00944c02237b\") " Oct 03 13:54:17 crc kubenswrapper[4861]: I1003 13:54:17.097761 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/baaeffdf-d905-4d99-905e-00944c02237b-logs\") pod \"baaeffdf-d905-4d99-905e-00944c02237b\" (UID: \"baaeffdf-d905-4d99-905e-00944c02237b\") " Oct 03 13:54:17 crc kubenswrapper[4861]: I1003 13:54:17.100145 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/baaeffdf-d905-4d99-905e-00944c02237b-logs" (OuterVolumeSpecName: "logs") pod "baaeffdf-d905-4d99-905e-00944c02237b" (UID: "baaeffdf-d905-4d99-905e-00944c02237b"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:54:17 crc kubenswrapper[4861]: I1003 13:54:17.104354 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/baaeffdf-d905-4d99-905e-00944c02237b-kube-api-access-gh7qv" (OuterVolumeSpecName: "kube-api-access-gh7qv") pod "baaeffdf-d905-4d99-905e-00944c02237b" (UID: "baaeffdf-d905-4d99-905e-00944c02237b"). InnerVolumeSpecName "kube-api-access-gh7qv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:54:17 crc kubenswrapper[4861]: I1003 13:54:17.131802 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/baaeffdf-d905-4d99-905e-00944c02237b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "baaeffdf-d905-4d99-905e-00944c02237b" (UID: "baaeffdf-d905-4d99-905e-00944c02237b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:54:17 crc kubenswrapper[4861]: I1003 13:54:17.156003 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/baaeffdf-d905-4d99-905e-00944c02237b-config-data" (OuterVolumeSpecName: "config-data") pod "baaeffdf-d905-4d99-905e-00944c02237b" (UID: "baaeffdf-d905-4d99-905e-00944c02237b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:54:17 crc kubenswrapper[4861]: I1003 13:54:17.161283 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/baaeffdf-d905-4d99-905e-00944c02237b-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "baaeffdf-d905-4d99-905e-00944c02237b" (UID: "baaeffdf-d905-4d99-905e-00944c02237b"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:54:17 crc kubenswrapper[4861]: I1003 13:54:17.187821 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/baaeffdf-d905-4d99-905e-00944c02237b-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "baaeffdf-d905-4d99-905e-00944c02237b" (UID: "baaeffdf-d905-4d99-905e-00944c02237b"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:54:17 crc kubenswrapper[4861]: I1003 13:54:17.199863 4861 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/baaeffdf-d905-4d99-905e-00944c02237b-logs\") on node \"crc\" DevicePath \"\"" Oct 03 13:54:17 crc kubenswrapper[4861]: I1003 13:54:17.199894 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gh7qv\" (UniqueName: \"kubernetes.io/projected/baaeffdf-d905-4d99-905e-00944c02237b-kube-api-access-gh7qv\") on node \"crc\" DevicePath \"\"" Oct 03 13:54:17 crc kubenswrapper[4861]: I1003 13:54:17.199904 4861 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/baaeffdf-d905-4d99-905e-00944c02237b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 13:54:17 crc kubenswrapper[4861]: I1003 13:54:17.199912 4861 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/baaeffdf-d905-4d99-905e-00944c02237b-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 03 13:54:17 crc kubenswrapper[4861]: I1003 13:54:17.199920 4861 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/baaeffdf-d905-4d99-905e-00944c02237b-public-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 03 13:54:17 crc kubenswrapper[4861]: I1003 13:54:17.199928 4861 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/baaeffdf-d905-4d99-905e-00944c02237b-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 13:54:17 crc kubenswrapper[4861]: I1003 13:54:17.994178 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"92433e8c-4d6d-4a9d-a492-192863eed46c","Type":"ContainerStarted","Data":"68b75d53909653ebabd313e8d4fcef61c6317def21190fbbb08044aeee80ef18"} Oct 03 13:54:17 crc kubenswrapper[4861]: I1003 13:54:17.994261 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"92433e8c-4d6d-4a9d-a492-192863eed46c","Type":"ContainerStarted","Data":"3c95691eccdbf51dc83ad0869b51283340eab4e8c5c1b6daf7afd3a47d36ca6a"} Oct 03 13:54:17 crc kubenswrapper[4861]: I1003 13:54:17.996770 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 03 13:54:17 crc kubenswrapper[4861]: I1003 13:54:17.997554 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"baaeffdf-d905-4d99-905e-00944c02237b","Type":"ContainerDied","Data":"d61dd65263fa80ca813eddfac5a7b728b1b9d1605ad151182e619b821c38b476"} Oct 03 13:54:17 crc kubenswrapper[4861]: I1003 13:54:17.997617 4861 scope.go:117] "RemoveContainer" containerID="9f968a5aff9f9669f2fb38b98aa20015ff77140eded634f8ae69366ebce6cb8f" Oct 03 13:54:18 crc kubenswrapper[4861]: I1003 13:54:18.017419 4861 scope.go:117] "RemoveContainer" containerID="4ac1345636d9916e8f3a75f8f6f3fdd485ec93b3cb12dfdafb0f71e8b61b07ec" Oct 03 13:54:18 crc kubenswrapper[4861]: I1003 13:54:18.049423 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.049404482 podStartE2EDuration="2.049404482s" podCreationTimestamp="2025-10-03 13:54:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:54:18.025776169 +0000 UTC m=+1372.023761216" watchObservedRunningTime="2025-10-03 13:54:18.049404482 +0000 UTC m=+1372.047389529" Oct 03 13:54:18 crc kubenswrapper[4861]: I1003 13:54:18.061987 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 03 13:54:18 crc kubenswrapper[4861]: I1003 13:54:18.075582 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Oct 03 13:54:18 crc kubenswrapper[4861]: I1003 13:54:18.089943 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Oct 03 13:54:18 crc kubenswrapper[4861]: E1003 13:54:18.090440 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="baaeffdf-d905-4d99-905e-00944c02237b" containerName="nova-api-api" Oct 03 13:54:18 crc kubenswrapper[4861]: I1003 13:54:18.090465 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="baaeffdf-d905-4d99-905e-00944c02237b" containerName="nova-api-api" Oct 03 13:54:18 crc kubenswrapper[4861]: E1003 13:54:18.090499 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="baaeffdf-d905-4d99-905e-00944c02237b" containerName="nova-api-log" Oct 03 13:54:18 crc kubenswrapper[4861]: I1003 13:54:18.090508 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="baaeffdf-d905-4d99-905e-00944c02237b" containerName="nova-api-log" Oct 03 13:54:18 crc kubenswrapper[4861]: I1003 13:54:18.090728 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="baaeffdf-d905-4d99-905e-00944c02237b" containerName="nova-api-log" Oct 03 13:54:18 crc kubenswrapper[4861]: I1003 13:54:18.090755 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="baaeffdf-d905-4d99-905e-00944c02237b" containerName="nova-api-api" Oct 03 13:54:18 crc kubenswrapper[4861]: I1003 13:54:18.091804 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 03 13:54:18 crc kubenswrapper[4861]: I1003 13:54:18.095382 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Oct 03 13:54:18 crc kubenswrapper[4861]: I1003 13:54:18.095599 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Oct 03 13:54:18 crc kubenswrapper[4861]: I1003 13:54:18.095725 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Oct 03 13:54:18 crc kubenswrapper[4861]: I1003 13:54:18.110078 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 03 13:54:18 crc kubenswrapper[4861]: I1003 13:54:18.218189 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f7bb8d1-2c92-4ce3-b510-386e42fab1ac-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"9f7bb8d1-2c92-4ce3-b510-386e42fab1ac\") " pod="openstack/nova-api-0" Oct 03 13:54:18 crc kubenswrapper[4861]: I1003 13:54:18.218265 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9f7bb8d1-2c92-4ce3-b510-386e42fab1ac-internal-tls-certs\") pod \"nova-api-0\" (UID: \"9f7bb8d1-2c92-4ce3-b510-386e42fab1ac\") " pod="openstack/nova-api-0" Oct 03 13:54:18 crc kubenswrapper[4861]: I1003 13:54:18.218312 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gnlpr\" (UniqueName: \"kubernetes.io/projected/9f7bb8d1-2c92-4ce3-b510-386e42fab1ac-kube-api-access-gnlpr\") pod \"nova-api-0\" (UID: \"9f7bb8d1-2c92-4ce3-b510-386e42fab1ac\") " pod="openstack/nova-api-0" Oct 03 13:54:18 crc kubenswrapper[4861]: I1003 13:54:18.218367 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9f7bb8d1-2c92-4ce3-b510-386e42fab1ac-logs\") pod \"nova-api-0\" (UID: \"9f7bb8d1-2c92-4ce3-b510-386e42fab1ac\") " pod="openstack/nova-api-0" Oct 03 13:54:18 crc kubenswrapper[4861]: I1003 13:54:18.218401 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9f7bb8d1-2c92-4ce3-b510-386e42fab1ac-public-tls-certs\") pod \"nova-api-0\" (UID: \"9f7bb8d1-2c92-4ce3-b510-386e42fab1ac\") " pod="openstack/nova-api-0" Oct 03 13:54:18 crc kubenswrapper[4861]: I1003 13:54:18.218465 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9f7bb8d1-2c92-4ce3-b510-386e42fab1ac-config-data\") pod \"nova-api-0\" (UID: \"9f7bb8d1-2c92-4ce3-b510-386e42fab1ac\") " pod="openstack/nova-api-0" Oct 03 13:54:18 crc kubenswrapper[4861]: I1003 13:54:18.320123 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9f7bb8d1-2c92-4ce3-b510-386e42fab1ac-config-data\") pod \"nova-api-0\" (UID: \"9f7bb8d1-2c92-4ce3-b510-386e42fab1ac\") " pod="openstack/nova-api-0" Oct 03 13:54:18 crc kubenswrapper[4861]: I1003 13:54:18.320293 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f7bb8d1-2c92-4ce3-b510-386e42fab1ac-combined-ca-bundle\") pod \"nova-api-0\" (UID: 
\"9f7bb8d1-2c92-4ce3-b510-386e42fab1ac\") " pod="openstack/nova-api-0" Oct 03 13:54:18 crc kubenswrapper[4861]: I1003 13:54:18.320466 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9f7bb8d1-2c92-4ce3-b510-386e42fab1ac-internal-tls-certs\") pod \"nova-api-0\" (UID: \"9f7bb8d1-2c92-4ce3-b510-386e42fab1ac\") " pod="openstack/nova-api-0" Oct 03 13:54:18 crc kubenswrapper[4861]: I1003 13:54:18.320997 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gnlpr\" (UniqueName: \"kubernetes.io/projected/9f7bb8d1-2c92-4ce3-b510-386e42fab1ac-kube-api-access-gnlpr\") pod \"nova-api-0\" (UID: \"9f7bb8d1-2c92-4ce3-b510-386e42fab1ac\") " pod="openstack/nova-api-0" Oct 03 13:54:18 crc kubenswrapper[4861]: I1003 13:54:18.321068 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9f7bb8d1-2c92-4ce3-b510-386e42fab1ac-logs\") pod \"nova-api-0\" (UID: \"9f7bb8d1-2c92-4ce3-b510-386e42fab1ac\") " pod="openstack/nova-api-0" Oct 03 13:54:18 crc kubenswrapper[4861]: I1003 13:54:18.321110 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9f7bb8d1-2c92-4ce3-b510-386e42fab1ac-public-tls-certs\") pod \"nova-api-0\" (UID: \"9f7bb8d1-2c92-4ce3-b510-386e42fab1ac\") " pod="openstack/nova-api-0" Oct 03 13:54:18 crc kubenswrapper[4861]: I1003 13:54:18.321898 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9f7bb8d1-2c92-4ce3-b510-386e42fab1ac-logs\") pod \"nova-api-0\" (UID: \"9f7bb8d1-2c92-4ce3-b510-386e42fab1ac\") " pod="openstack/nova-api-0" Oct 03 13:54:18 crc kubenswrapper[4861]: I1003 13:54:18.323637 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9f7bb8d1-2c92-4ce3-b510-386e42fab1ac-internal-tls-certs\") pod \"nova-api-0\" (UID: \"9f7bb8d1-2c92-4ce3-b510-386e42fab1ac\") " pod="openstack/nova-api-0" Oct 03 13:54:18 crc kubenswrapper[4861]: I1003 13:54:18.325798 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f7bb8d1-2c92-4ce3-b510-386e42fab1ac-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"9f7bb8d1-2c92-4ce3-b510-386e42fab1ac\") " pod="openstack/nova-api-0" Oct 03 13:54:18 crc kubenswrapper[4861]: I1003 13:54:18.337953 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9f7bb8d1-2c92-4ce3-b510-386e42fab1ac-config-data\") pod \"nova-api-0\" (UID: \"9f7bb8d1-2c92-4ce3-b510-386e42fab1ac\") " pod="openstack/nova-api-0" Oct 03 13:54:18 crc kubenswrapper[4861]: I1003 13:54:18.338726 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9f7bb8d1-2c92-4ce3-b510-386e42fab1ac-public-tls-certs\") pod \"nova-api-0\" (UID: \"9f7bb8d1-2c92-4ce3-b510-386e42fab1ac\") " pod="openstack/nova-api-0" Oct 03 13:54:18 crc kubenswrapper[4861]: I1003 13:54:18.341206 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gnlpr\" (UniqueName: \"kubernetes.io/projected/9f7bb8d1-2c92-4ce3-b510-386e42fab1ac-kube-api-access-gnlpr\") pod \"nova-api-0\" (UID: \"9f7bb8d1-2c92-4ce3-b510-386e42fab1ac\") " pod="openstack/nova-api-0" Oct 
03 13:54:18 crc kubenswrapper[4861]: I1003 13:54:18.412278 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 03 13:54:18 crc kubenswrapper[4861]: I1003 13:54:18.735935 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="baaeffdf-d905-4d99-905e-00944c02237b" path="/var/lib/kubelet/pods/baaeffdf-d905-4d99-905e-00944c02237b/volumes" Oct 03 13:54:18 crc kubenswrapper[4861]: I1003 13:54:18.934843 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 03 13:54:19 crc kubenswrapper[4861]: I1003 13:54:19.013493 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"9f7bb8d1-2c92-4ce3-b510-386e42fab1ac","Type":"ContainerStarted","Data":"552d8f40daedd96c2207ba79f2befe00171a0d099a534fd4406ca4a85c9afda0"} Oct 03 13:54:20 crc kubenswrapper[4861]: I1003 13:54:20.028493 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"9f7bb8d1-2c92-4ce3-b510-386e42fab1ac","Type":"ContainerStarted","Data":"c086d56f55c423fb5abd439226cb909c95fdf560f7113b3f38c0c026fec4ccab"} Oct 03 13:54:20 crc kubenswrapper[4861]: I1003 13:54:20.028952 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"9f7bb8d1-2c92-4ce3-b510-386e42fab1ac","Type":"ContainerStarted","Data":"9e539b971b39108f50217525d4250a883edf71028f248d11e46a14f500738737"} Oct 03 13:54:20 crc kubenswrapper[4861]: I1003 13:54:20.059344 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.059308949 podStartE2EDuration="2.059308949s" podCreationTimestamp="2025-10-03 13:54:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:54:20.054822301 +0000 UTC m=+1374.052807348" watchObservedRunningTime="2025-10-03 13:54:20.059308949 +0000 UTC m=+1374.057294006" Oct 03 13:54:20 crc kubenswrapper[4861]: I1003 13:54:20.366284 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Oct 03 13:54:21 crc kubenswrapper[4861]: I1003 13:54:21.359357 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 03 13:54:21 crc kubenswrapper[4861]: I1003 13:54:21.359405 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 03 13:54:25 crc kubenswrapper[4861]: I1003 13:54:25.366581 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Oct 03 13:54:25 crc kubenswrapper[4861]: I1003 13:54:25.401475 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Oct 03 13:54:26 crc kubenswrapper[4861]: I1003 13:54:26.121211 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Oct 03 13:54:26 crc kubenswrapper[4861]: I1003 13:54:26.360688 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Oct 03 13:54:26 crc kubenswrapper[4861]: I1003 13:54:26.360729 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Oct 03 13:54:27 crc kubenswrapper[4861]: I1003 13:54:27.377583 4861 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="92433e8c-4d6d-4a9d-a492-192863eed46c" 
containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.202:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 03 13:54:27 crc kubenswrapper[4861]: I1003 13:54:27.377612 4861 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="92433e8c-4d6d-4a9d-a492-192863eed46c" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.202:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 03 13:54:28 crc kubenswrapper[4861]: I1003 13:54:28.412456 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 03 13:54:28 crc kubenswrapper[4861]: I1003 13:54:28.412513 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 03 13:54:29 crc kubenswrapper[4861]: I1003 13:54:29.402880 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Oct 03 13:54:29 crc kubenswrapper[4861]: I1003 13:54:29.433434 4861 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="9f7bb8d1-2c92-4ce3-b510-386e42fab1ac" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.203:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 03 13:54:29 crc kubenswrapper[4861]: I1003 13:54:29.433830 4861 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="9f7bb8d1-2c92-4ce3-b510-386e42fab1ac" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.203:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 03 13:54:32 crc kubenswrapper[4861]: I1003 13:54:32.016178 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-vwpt4"] Oct 03 13:54:32 crc kubenswrapper[4861]: I1003 13:54:32.019537 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-vwpt4" Oct 03 13:54:32 crc kubenswrapper[4861]: I1003 13:54:32.026850 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-vwpt4"] Oct 03 13:54:32 crc kubenswrapper[4861]: I1003 13:54:32.143796 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c9c81fcd-8cf8-4d58-8ae1-8161f6fefa5f-utilities\") pod \"redhat-operators-vwpt4\" (UID: \"c9c81fcd-8cf8-4d58-8ae1-8161f6fefa5f\") " pod="openshift-marketplace/redhat-operators-vwpt4" Oct 03 13:54:32 crc kubenswrapper[4861]: I1003 13:54:32.143890 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c9c81fcd-8cf8-4d58-8ae1-8161f6fefa5f-catalog-content\") pod \"redhat-operators-vwpt4\" (UID: \"c9c81fcd-8cf8-4d58-8ae1-8161f6fefa5f\") " pod="openshift-marketplace/redhat-operators-vwpt4" Oct 03 13:54:32 crc kubenswrapper[4861]: I1003 13:54:32.143949 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f4x26\" (UniqueName: \"kubernetes.io/projected/c9c81fcd-8cf8-4d58-8ae1-8161f6fefa5f-kube-api-access-f4x26\") pod \"redhat-operators-vwpt4\" (UID: \"c9c81fcd-8cf8-4d58-8ae1-8161f6fefa5f\") " pod="openshift-marketplace/redhat-operators-vwpt4" Oct 03 13:54:32 crc kubenswrapper[4861]: I1003 13:54:32.245386 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c9c81fcd-8cf8-4d58-8ae1-8161f6fefa5f-utilities\") pod \"redhat-operators-vwpt4\" (UID: \"c9c81fcd-8cf8-4d58-8ae1-8161f6fefa5f\") " pod="openshift-marketplace/redhat-operators-vwpt4" Oct 03 13:54:32 crc kubenswrapper[4861]: I1003 13:54:32.245475 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c9c81fcd-8cf8-4d58-8ae1-8161f6fefa5f-catalog-content\") pod \"redhat-operators-vwpt4\" (UID: \"c9c81fcd-8cf8-4d58-8ae1-8161f6fefa5f\") " pod="openshift-marketplace/redhat-operators-vwpt4" Oct 03 13:54:32 crc kubenswrapper[4861]: I1003 13:54:32.245529 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f4x26\" (UniqueName: \"kubernetes.io/projected/c9c81fcd-8cf8-4d58-8ae1-8161f6fefa5f-kube-api-access-f4x26\") pod \"redhat-operators-vwpt4\" (UID: \"c9c81fcd-8cf8-4d58-8ae1-8161f6fefa5f\") " pod="openshift-marketplace/redhat-operators-vwpt4" Oct 03 13:54:32 crc kubenswrapper[4861]: I1003 13:54:32.245951 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c9c81fcd-8cf8-4d58-8ae1-8161f6fefa5f-utilities\") pod \"redhat-operators-vwpt4\" (UID: \"c9c81fcd-8cf8-4d58-8ae1-8161f6fefa5f\") " pod="openshift-marketplace/redhat-operators-vwpt4" Oct 03 13:54:32 crc kubenswrapper[4861]: I1003 13:54:32.246014 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c9c81fcd-8cf8-4d58-8ae1-8161f6fefa5f-catalog-content\") pod \"redhat-operators-vwpt4\" (UID: \"c9c81fcd-8cf8-4d58-8ae1-8161f6fefa5f\") " pod="openshift-marketplace/redhat-operators-vwpt4" Oct 03 13:54:32 crc kubenswrapper[4861]: I1003 13:54:32.270411 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-f4x26\" (UniqueName: \"kubernetes.io/projected/c9c81fcd-8cf8-4d58-8ae1-8161f6fefa5f-kube-api-access-f4x26\") pod \"redhat-operators-vwpt4\" (UID: \"c9c81fcd-8cf8-4d58-8ae1-8161f6fefa5f\") " pod="openshift-marketplace/redhat-operators-vwpt4" Oct 03 13:54:32 crc kubenswrapper[4861]: I1003 13:54:32.351248 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-vwpt4" Oct 03 13:54:32 crc kubenswrapper[4861]: I1003 13:54:32.832823 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-vwpt4"] Oct 03 13:54:33 crc kubenswrapper[4861]: I1003 13:54:33.158818 4861 generic.go:334] "Generic (PLEG): container finished" podID="c9c81fcd-8cf8-4d58-8ae1-8161f6fefa5f" containerID="b16c0dbcc35a81f6ea07c484b032653c5effa47c058dab49be59318e79f86de7" exitCode=0 Oct 03 13:54:33 crc kubenswrapper[4861]: I1003 13:54:33.158891 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vwpt4" event={"ID":"c9c81fcd-8cf8-4d58-8ae1-8161f6fefa5f","Type":"ContainerDied","Data":"b16c0dbcc35a81f6ea07c484b032653c5effa47c058dab49be59318e79f86de7"} Oct 03 13:54:33 crc kubenswrapper[4861]: I1003 13:54:33.158941 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vwpt4" event={"ID":"c9c81fcd-8cf8-4d58-8ae1-8161f6fefa5f","Type":"ContainerStarted","Data":"4f8b8d02a231a8e1ef15a935ecbe2c1a53e65edaa814918115a8f568278ff7f0"} Oct 03 13:54:35 crc kubenswrapper[4861]: I1003 13:54:35.178713 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vwpt4" event={"ID":"c9c81fcd-8cf8-4d58-8ae1-8161f6fefa5f","Type":"ContainerStarted","Data":"afe861561a1444d45de9bf356815d9f343e51ad97ff92041d5510e70aab88e77"} Oct 03 13:54:36 crc kubenswrapper[4861]: I1003 13:54:36.458538 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Oct 03 13:54:36 crc kubenswrapper[4861]: I1003 13:54:36.459244 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Oct 03 13:54:36 crc kubenswrapper[4861]: I1003 13:54:36.466794 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Oct 03 13:54:37 crc kubenswrapper[4861]: I1003 13:54:37.213529 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Oct 03 13:54:38 crc kubenswrapper[4861]: I1003 13:54:38.419867 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Oct 03 13:54:38 crc kubenswrapper[4861]: I1003 13:54:38.420813 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Oct 03 13:54:38 crc kubenswrapper[4861]: I1003 13:54:38.429629 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Oct 03 13:54:38 crc kubenswrapper[4861]: I1003 13:54:38.432750 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Oct 03 13:54:39 crc kubenswrapper[4861]: I1003 13:54:39.219469 4861 generic.go:334] "Generic (PLEG): container finished" podID="c9c81fcd-8cf8-4d58-8ae1-8161f6fefa5f" containerID="afe861561a1444d45de9bf356815d9f343e51ad97ff92041d5510e70aab88e77" exitCode=0 Oct 03 13:54:39 crc kubenswrapper[4861]: I1003 13:54:39.219531 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/redhat-operators-vwpt4" event={"ID":"c9c81fcd-8cf8-4d58-8ae1-8161f6fefa5f","Type":"ContainerDied","Data":"afe861561a1444d45de9bf356815d9f343e51ad97ff92041d5510e70aab88e77"} Oct 03 13:54:39 crc kubenswrapper[4861]: I1003 13:54:39.220109 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Oct 03 13:54:39 crc kubenswrapper[4861]: I1003 13:54:39.232813 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Oct 03 13:54:40 crc kubenswrapper[4861]: I1003 13:54:40.231390 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vwpt4" event={"ID":"c9c81fcd-8cf8-4d58-8ae1-8161f6fefa5f","Type":"ContainerStarted","Data":"25a27b662ecc16f194e71a0aa50777f553b648b24484f2f11dd85ce4904f2059"} Oct 03 13:54:40 crc kubenswrapper[4861]: I1003 13:54:40.256765 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-vwpt4" podStartSLOduration=2.731902981 podStartE2EDuration="9.256749877s" podCreationTimestamp="2025-10-03 13:54:31 +0000 UTC" firstStartedPulling="2025-10-03 13:54:33.160329596 +0000 UTC m=+1387.158314643" lastFinishedPulling="2025-10-03 13:54:39.685176482 +0000 UTC m=+1393.683161539" observedRunningTime="2025-10-03 13:54:40.25342893 +0000 UTC m=+1394.251413967" watchObservedRunningTime="2025-10-03 13:54:40.256749877 +0000 UTC m=+1394.254734924" Oct 03 13:54:42 crc kubenswrapper[4861]: I1003 13:54:42.352405 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-vwpt4" Oct 03 13:54:42 crc kubenswrapper[4861]: I1003 13:54:42.352704 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-vwpt4" Oct 03 13:54:43 crc kubenswrapper[4861]: I1003 13:54:43.400442 4861 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-vwpt4" podUID="c9c81fcd-8cf8-4d58-8ae1-8161f6fefa5f" containerName="registry-server" probeResult="failure" output=< Oct 03 13:54:43 crc kubenswrapper[4861]: timeout: failed to connect service ":50051" within 1s Oct 03 13:54:43 crc kubenswrapper[4861]: > Oct 03 13:54:48 crc kubenswrapper[4861]: I1003 13:54:48.245972 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 03 13:54:49 crc kubenswrapper[4861]: I1003 13:54:49.109814 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 03 13:54:53 crc kubenswrapper[4861]: I1003 13:54:53.402254 4861 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-vwpt4" podUID="c9c81fcd-8cf8-4d58-8ae1-8161f6fefa5f" containerName="registry-server" probeResult="failure" output=< Oct 03 13:54:53 crc kubenswrapper[4861]: timeout: failed to connect service ":50051" within 1s Oct 03 13:54:53 crc kubenswrapper[4861]: > Oct 03 13:54:53 crc kubenswrapper[4861]: I1003 13:54:53.719469 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="b9bf9ee1-8038-4578-b10d-390a82c11290" containerName="rabbitmq" containerID="cri-o://8dfa69721269065dad4859bc787838ef6c05081e9d3dfde8a79bec4d85aa1bb8" gracePeriod=604795 Oct 03 13:54:54 crc kubenswrapper[4861]: I1003 13:54:54.065301 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" 
podUID="d537dadb-d98c-4ac3-ae54-fc0a9397d7d7" containerName="rabbitmq" containerID="cri-o://9d80e2d6952baf608f74f963d8e2bda4ba73dfa74ab0e13c642bc8e6396c5daf" gracePeriod=604796 Oct 03 13:55:00 crc kubenswrapper[4861]: I1003 13:55:00.350835 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 03 13:55:00 crc kubenswrapper[4861]: I1003 13:55:00.411490 4861 generic.go:334] "Generic (PLEG): container finished" podID="b9bf9ee1-8038-4578-b10d-390a82c11290" containerID="8dfa69721269065dad4859bc787838ef6c05081e9d3dfde8a79bec4d85aa1bb8" exitCode=0 Oct 03 13:55:00 crc kubenswrapper[4861]: I1003 13:55:00.411546 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"b9bf9ee1-8038-4578-b10d-390a82c11290","Type":"ContainerDied","Data":"8dfa69721269065dad4859bc787838ef6c05081e9d3dfde8a79bec4d85aa1bb8"} Oct 03 13:55:00 crc kubenswrapper[4861]: I1003 13:55:00.411571 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"b9bf9ee1-8038-4578-b10d-390a82c11290","Type":"ContainerDied","Data":"f5f96bd243c9a7efc6b1275195e8c08800f6a91192e5de63fda155ad336005b3"} Oct 03 13:55:00 crc kubenswrapper[4861]: I1003 13:55:00.411588 4861 scope.go:117] "RemoveContainer" containerID="8dfa69721269065dad4859bc787838ef6c05081e9d3dfde8a79bec4d85aa1bb8" Oct 03 13:55:00 crc kubenswrapper[4861]: I1003 13:55:00.411709 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 03 13:55:00 crc kubenswrapper[4861]: I1003 13:55:00.417113 4861 generic.go:334] "Generic (PLEG): container finished" podID="d537dadb-d98c-4ac3-ae54-fc0a9397d7d7" containerID="9d80e2d6952baf608f74f963d8e2bda4ba73dfa74ab0e13c642bc8e6396c5daf" exitCode=0 Oct 03 13:55:00 crc kubenswrapper[4861]: I1003 13:55:00.417144 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"d537dadb-d98c-4ac3-ae54-fc0a9397d7d7","Type":"ContainerDied","Data":"9d80e2d6952baf608f74f963d8e2bda4ba73dfa74ab0e13c642bc8e6396c5daf"} Oct 03 13:55:00 crc kubenswrapper[4861]: I1003 13:55:00.489692 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j9jpr\" (UniqueName: \"kubernetes.io/projected/b9bf9ee1-8038-4578-b10d-390a82c11290-kube-api-access-j9jpr\") pod \"b9bf9ee1-8038-4578-b10d-390a82c11290\" (UID: \"b9bf9ee1-8038-4578-b10d-390a82c11290\") " Oct 03 13:55:00 crc kubenswrapper[4861]: I1003 13:55:00.491750 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/b9bf9ee1-8038-4578-b10d-390a82c11290-rabbitmq-confd\") pod \"b9bf9ee1-8038-4578-b10d-390a82c11290\" (UID: \"b9bf9ee1-8038-4578-b10d-390a82c11290\") " Oct 03 13:55:00 crc kubenswrapper[4861]: I1003 13:55:00.491876 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/b9bf9ee1-8038-4578-b10d-390a82c11290-server-conf\") pod \"b9bf9ee1-8038-4578-b10d-390a82c11290\" (UID: \"b9bf9ee1-8038-4578-b10d-390a82c11290\") " Oct 03 13:55:00 crc kubenswrapper[4861]: I1003 13:55:00.491974 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"b9bf9ee1-8038-4578-b10d-390a82c11290\" (UID: \"b9bf9ee1-8038-4578-b10d-390a82c11290\") " Oct 03 13:55:00 crc 
kubenswrapper[4861]: I1003 13:55:00.492297 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/b9bf9ee1-8038-4578-b10d-390a82c11290-rabbitmq-plugins\") pod \"b9bf9ee1-8038-4578-b10d-390a82c11290\" (UID: \"b9bf9ee1-8038-4578-b10d-390a82c11290\") " Oct 03 13:55:00 crc kubenswrapper[4861]: I1003 13:55:00.495167 4861 scope.go:117] "RemoveContainer" containerID="9f9d39c8bf5f4551c44ca852498249659ad6b84b7d2f70a3afc57c8ccc19b665" Oct 03 13:55:00 crc kubenswrapper[4861]: I1003 13:55:00.496919 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/b9bf9ee1-8038-4578-b10d-390a82c11290-rabbitmq-erlang-cookie\") pod \"b9bf9ee1-8038-4578-b10d-390a82c11290\" (UID: \"b9bf9ee1-8038-4578-b10d-390a82c11290\") " Oct 03 13:55:00 crc kubenswrapper[4861]: I1003 13:55:00.497029 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/b9bf9ee1-8038-4578-b10d-390a82c11290-rabbitmq-tls\") pod \"b9bf9ee1-8038-4578-b10d-390a82c11290\" (UID: \"b9bf9ee1-8038-4578-b10d-390a82c11290\") " Oct 03 13:55:00 crc kubenswrapper[4861]: I1003 13:55:00.497140 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/b9bf9ee1-8038-4578-b10d-390a82c11290-plugins-conf\") pod \"b9bf9ee1-8038-4578-b10d-390a82c11290\" (UID: \"b9bf9ee1-8038-4578-b10d-390a82c11290\") " Oct 03 13:55:00 crc kubenswrapper[4861]: I1003 13:55:00.497239 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b9bf9ee1-8038-4578-b10d-390a82c11290-config-data\") pod \"b9bf9ee1-8038-4578-b10d-390a82c11290\" (UID: \"b9bf9ee1-8038-4578-b10d-390a82c11290\") " Oct 03 13:55:00 crc kubenswrapper[4861]: I1003 13:55:00.497327 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/b9bf9ee1-8038-4578-b10d-390a82c11290-pod-info\") pod \"b9bf9ee1-8038-4578-b10d-390a82c11290\" (UID: \"b9bf9ee1-8038-4578-b10d-390a82c11290\") " Oct 03 13:55:00 crc kubenswrapper[4861]: I1003 13:55:00.497424 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/b9bf9ee1-8038-4578-b10d-390a82c11290-erlang-cookie-secret\") pod \"b9bf9ee1-8038-4578-b10d-390a82c11290\" (UID: \"b9bf9ee1-8038-4578-b10d-390a82c11290\") " Oct 03 13:55:00 crc kubenswrapper[4861]: I1003 13:55:00.500730 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b9bf9ee1-8038-4578-b10d-390a82c11290-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "b9bf9ee1-8038-4578-b10d-390a82c11290" (UID: "b9bf9ee1-8038-4578-b10d-390a82c11290"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:55:00 crc kubenswrapper[4861]: I1003 13:55:00.512996 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b9bf9ee1-8038-4578-b10d-390a82c11290-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "b9bf9ee1-8038-4578-b10d-390a82c11290" (UID: "b9bf9ee1-8038-4578-b10d-390a82c11290"). InnerVolumeSpecName "rabbitmq-erlang-cookie". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:55:00 crc kubenswrapper[4861]: I1003 13:55:00.515966 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/b9bf9ee1-8038-4578-b10d-390a82c11290-pod-info" (OuterVolumeSpecName: "pod-info") pod "b9bf9ee1-8038-4578-b10d-390a82c11290" (UID: "b9bf9ee1-8038-4578-b10d-390a82c11290"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Oct 03 13:55:00 crc kubenswrapper[4861]: I1003 13:55:00.526401 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b9bf9ee1-8038-4578-b10d-390a82c11290-kube-api-access-j9jpr" (OuterVolumeSpecName: "kube-api-access-j9jpr") pod "b9bf9ee1-8038-4578-b10d-390a82c11290" (UID: "b9bf9ee1-8038-4578-b10d-390a82c11290"). InnerVolumeSpecName "kube-api-access-j9jpr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:55:00 crc kubenswrapper[4861]: I1003 13:55:00.531511 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b9bf9ee1-8038-4578-b10d-390a82c11290-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "b9bf9ee1-8038-4578-b10d-390a82c11290" (UID: "b9bf9ee1-8038-4578-b10d-390a82c11290"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:55:00 crc kubenswrapper[4861]: I1003 13:55:00.539463 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b9bf9ee1-8038-4578-b10d-390a82c11290-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "b9bf9ee1-8038-4578-b10d-390a82c11290" (UID: "b9bf9ee1-8038-4578-b10d-390a82c11290"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:55:00 crc kubenswrapper[4861]: I1003 13:55:00.550134 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage12-crc" (OuterVolumeSpecName: "persistence") pod "b9bf9ee1-8038-4578-b10d-390a82c11290" (UID: "b9bf9ee1-8038-4578-b10d-390a82c11290"). InnerVolumeSpecName "local-storage12-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 03 13:55:00 crc kubenswrapper[4861]: I1003 13:55:00.550200 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b9bf9ee1-8038-4578-b10d-390a82c11290-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "b9bf9ee1-8038-4578-b10d-390a82c11290" (UID: "b9bf9ee1-8038-4578-b10d-390a82c11290"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:55:00 crc kubenswrapper[4861]: I1003 13:55:00.599662 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b9bf9ee1-8038-4578-b10d-390a82c11290-config-data" (OuterVolumeSpecName: "config-data") pod "b9bf9ee1-8038-4578-b10d-390a82c11290" (UID: "b9bf9ee1-8038-4578-b10d-390a82c11290"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:55:00 crc kubenswrapper[4861]: I1003 13:55:00.600630 4861 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/b9bf9ee1-8038-4578-b10d-390a82c11290-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Oct 03 13:55:00 crc kubenswrapper[4861]: I1003 13:55:00.600657 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j9jpr\" (UniqueName: \"kubernetes.io/projected/b9bf9ee1-8038-4578-b10d-390a82c11290-kube-api-access-j9jpr\") on node \"crc\" DevicePath \"\"" Oct 03 13:55:00 crc kubenswrapper[4861]: I1003 13:55:00.600677 4861 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" " Oct 03 13:55:00 crc kubenswrapper[4861]: I1003 13:55:00.600685 4861 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/b9bf9ee1-8038-4578-b10d-390a82c11290-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Oct 03 13:55:00 crc kubenswrapper[4861]: I1003 13:55:00.600694 4861 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/b9bf9ee1-8038-4578-b10d-390a82c11290-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Oct 03 13:55:00 crc kubenswrapper[4861]: I1003 13:55:00.600703 4861 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/b9bf9ee1-8038-4578-b10d-390a82c11290-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Oct 03 13:55:00 crc kubenswrapper[4861]: I1003 13:55:00.600711 4861 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/b9bf9ee1-8038-4578-b10d-390a82c11290-plugins-conf\") on node \"crc\" DevicePath \"\"" Oct 03 13:55:00 crc kubenswrapper[4861]: I1003 13:55:00.600720 4861 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b9bf9ee1-8038-4578-b10d-390a82c11290-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 13:55:00 crc kubenswrapper[4861]: I1003 13:55:00.600729 4861 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/b9bf9ee1-8038-4578-b10d-390a82c11290-pod-info\") on node \"crc\" DevicePath \"\"" Oct 03 13:55:00 crc kubenswrapper[4861]: I1003 13:55:00.639727 4861 scope.go:117] "RemoveContainer" containerID="8dfa69721269065dad4859bc787838ef6c05081e9d3dfde8a79bec4d85aa1bb8" Oct 03 13:55:00 crc kubenswrapper[4861]: E1003 13:55:00.658715 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8dfa69721269065dad4859bc787838ef6c05081e9d3dfde8a79bec4d85aa1bb8\": container with ID starting with 8dfa69721269065dad4859bc787838ef6c05081e9d3dfde8a79bec4d85aa1bb8 not found: ID does not exist" containerID="8dfa69721269065dad4859bc787838ef6c05081e9d3dfde8a79bec4d85aa1bb8" Oct 03 13:55:00 crc kubenswrapper[4861]: I1003 13:55:00.658773 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8dfa69721269065dad4859bc787838ef6c05081e9d3dfde8a79bec4d85aa1bb8"} err="failed to get container status \"8dfa69721269065dad4859bc787838ef6c05081e9d3dfde8a79bec4d85aa1bb8\": rpc error: code = NotFound desc = could not find container 
\"8dfa69721269065dad4859bc787838ef6c05081e9d3dfde8a79bec4d85aa1bb8\": container with ID starting with 8dfa69721269065dad4859bc787838ef6c05081e9d3dfde8a79bec4d85aa1bb8 not found: ID does not exist" Oct 03 13:55:00 crc kubenswrapper[4861]: I1003 13:55:00.658800 4861 scope.go:117] "RemoveContainer" containerID="9f9d39c8bf5f4551c44ca852498249659ad6b84b7d2f70a3afc57c8ccc19b665" Oct 03 13:55:00 crc kubenswrapper[4861]: E1003 13:55:00.660849 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9f9d39c8bf5f4551c44ca852498249659ad6b84b7d2f70a3afc57c8ccc19b665\": container with ID starting with 9f9d39c8bf5f4551c44ca852498249659ad6b84b7d2f70a3afc57c8ccc19b665 not found: ID does not exist" containerID="9f9d39c8bf5f4551c44ca852498249659ad6b84b7d2f70a3afc57c8ccc19b665" Oct 03 13:55:00 crc kubenswrapper[4861]: I1003 13:55:00.660894 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9f9d39c8bf5f4551c44ca852498249659ad6b84b7d2f70a3afc57c8ccc19b665"} err="failed to get container status \"9f9d39c8bf5f4551c44ca852498249659ad6b84b7d2f70a3afc57c8ccc19b665\": rpc error: code = NotFound desc = could not find container \"9f9d39c8bf5f4551c44ca852498249659ad6b84b7d2f70a3afc57c8ccc19b665\": container with ID starting with 9f9d39c8bf5f4551c44ca852498249659ad6b84b7d2f70a3afc57c8ccc19b665 not found: ID does not exist" Oct 03 13:55:00 crc kubenswrapper[4861]: I1003 13:55:00.681907 4861 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage12-crc" (UniqueName: "kubernetes.io/local-volume/local-storage12-crc") on node "crc" Oct 03 13:55:00 crc kubenswrapper[4861]: I1003 13:55:00.699560 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:55:00 crc kubenswrapper[4861]: I1003 13:55:00.702623 4861 reconciler_common.go:293] "Volume detached for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" DevicePath \"\"" Oct 03 13:55:00 crc kubenswrapper[4861]: I1003 13:55:00.746469 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b9bf9ee1-8038-4578-b10d-390a82c11290-server-conf" (OuterVolumeSpecName: "server-conf") pod "b9bf9ee1-8038-4578-b10d-390a82c11290" (UID: "b9bf9ee1-8038-4578-b10d-390a82c11290"). InnerVolumeSpecName "server-conf". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:55:00 crc kubenswrapper[4861]: I1003 13:55:00.812219 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/d537dadb-d98c-4ac3-ae54-fc0a9397d7d7-plugins-conf\") pod \"d537dadb-d98c-4ac3-ae54-fc0a9397d7d7\" (UID: \"d537dadb-d98c-4ac3-ae54-fc0a9397d7d7\") " Oct 03 13:55:00 crc kubenswrapper[4861]: I1003 13:55:00.812505 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/d537dadb-d98c-4ac3-ae54-fc0a9397d7d7-rabbitmq-confd\") pod \"d537dadb-d98c-4ac3-ae54-fc0a9397d7d7\" (UID: \"d537dadb-d98c-4ac3-ae54-fc0a9397d7d7\") " Oct 03 13:55:00 crc kubenswrapper[4861]: I1003 13:55:00.812751 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/d537dadb-d98c-4ac3-ae54-fc0a9397d7d7-erlang-cookie-secret\") pod \"d537dadb-d98c-4ac3-ae54-fc0a9397d7d7\" (UID: \"d537dadb-d98c-4ac3-ae54-fc0a9397d7d7\") " Oct 03 13:55:00 crc kubenswrapper[4861]: I1003 13:55:00.813011 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/d537dadb-d98c-4ac3-ae54-fc0a9397d7d7-rabbitmq-plugins\") pod \"d537dadb-d98c-4ac3-ae54-fc0a9397d7d7\" (UID: \"d537dadb-d98c-4ac3-ae54-fc0a9397d7d7\") " Oct 03 13:55:00 crc kubenswrapper[4861]: I1003 13:55:00.813124 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/d537dadb-d98c-4ac3-ae54-fc0a9397d7d7-rabbitmq-erlang-cookie\") pod \"d537dadb-d98c-4ac3-ae54-fc0a9397d7d7\" (UID: \"d537dadb-d98c-4ac3-ae54-fc0a9397d7d7\") " Oct 03 13:55:00 crc kubenswrapper[4861]: I1003 13:55:00.813206 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/d537dadb-d98c-4ac3-ae54-fc0a9397d7d7-pod-info\") pod \"d537dadb-d98c-4ac3-ae54-fc0a9397d7d7\" (UID: \"d537dadb-d98c-4ac3-ae54-fc0a9397d7d7\") " Oct 03 13:55:00 crc kubenswrapper[4861]: I1003 13:55:00.813364 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/d537dadb-d98c-4ac3-ae54-fc0a9397d7d7-server-conf\") pod \"d537dadb-d98c-4ac3-ae54-fc0a9397d7d7\" (UID: \"d537dadb-d98c-4ac3-ae54-fc0a9397d7d7\") " Oct 03 13:55:00 crc kubenswrapper[4861]: I1003 13:55:00.813459 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d537dadb-d98c-4ac3-ae54-fc0a9397d7d7-config-data\") pod \"d537dadb-d98c-4ac3-ae54-fc0a9397d7d7\" (UID: \"d537dadb-d98c-4ac3-ae54-fc0a9397d7d7\") " Oct 03 13:55:00 crc kubenswrapper[4861]: I1003 13:55:00.817279 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"d537dadb-d98c-4ac3-ae54-fc0a9397d7d7\" (UID: \"d537dadb-d98c-4ac3-ae54-fc0a9397d7d7\") " Oct 03 13:55:00 crc kubenswrapper[4861]: I1003 13:55:00.817421 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6fmjk\" (UniqueName: \"kubernetes.io/projected/d537dadb-d98c-4ac3-ae54-fc0a9397d7d7-kube-api-access-6fmjk\") pod \"d537dadb-d98c-4ac3-ae54-fc0a9397d7d7\" (UID: 
\"d537dadb-d98c-4ac3-ae54-fc0a9397d7d7\") " Oct 03 13:55:00 crc kubenswrapper[4861]: I1003 13:55:00.817553 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/d537dadb-d98c-4ac3-ae54-fc0a9397d7d7-rabbitmq-tls\") pod \"d537dadb-d98c-4ac3-ae54-fc0a9397d7d7\" (UID: \"d537dadb-d98c-4ac3-ae54-fc0a9397d7d7\") " Oct 03 13:55:00 crc kubenswrapper[4861]: I1003 13:55:00.818292 4861 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/b9bf9ee1-8038-4578-b10d-390a82c11290-server-conf\") on node \"crc\" DevicePath \"\"" Oct 03 13:55:00 crc kubenswrapper[4861]: I1003 13:55:00.816434 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d537dadb-d98c-4ac3-ae54-fc0a9397d7d7-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "d537dadb-d98c-4ac3-ae54-fc0a9397d7d7" (UID: "d537dadb-d98c-4ac3-ae54-fc0a9397d7d7"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:55:00 crc kubenswrapper[4861]: I1003 13:55:00.816735 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d537dadb-d98c-4ac3-ae54-fc0a9397d7d7-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "d537dadb-d98c-4ac3-ae54-fc0a9397d7d7" (UID: "d537dadb-d98c-4ac3-ae54-fc0a9397d7d7"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:55:00 crc kubenswrapper[4861]: I1003 13:55:00.851094 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d537dadb-d98c-4ac3-ae54-fc0a9397d7d7-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "d537dadb-d98c-4ac3-ae54-fc0a9397d7d7" (UID: "d537dadb-d98c-4ac3-ae54-fc0a9397d7d7"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:55:00 crc kubenswrapper[4861]: I1003 13:55:00.853839 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d537dadb-d98c-4ac3-ae54-fc0a9397d7d7-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "d537dadb-d98c-4ac3-ae54-fc0a9397d7d7" (UID: "d537dadb-d98c-4ac3-ae54-fc0a9397d7d7"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:55:00 crc kubenswrapper[4861]: I1003 13:55:00.854006 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d537dadb-d98c-4ac3-ae54-fc0a9397d7d7-kube-api-access-6fmjk" (OuterVolumeSpecName: "kube-api-access-6fmjk") pod "d537dadb-d98c-4ac3-ae54-fc0a9397d7d7" (UID: "d537dadb-d98c-4ac3-ae54-fc0a9397d7d7"). InnerVolumeSpecName "kube-api-access-6fmjk". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:55:00 crc kubenswrapper[4861]: I1003 13:55:00.854456 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d537dadb-d98c-4ac3-ae54-fc0a9397d7d7-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "d537dadb-d98c-4ac3-ae54-fc0a9397d7d7" (UID: "d537dadb-d98c-4ac3-ae54-fc0a9397d7d7"). InnerVolumeSpecName "rabbitmq-tls". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:55:00 crc kubenswrapper[4861]: I1003 13:55:00.891952 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage04-crc" (OuterVolumeSpecName: "persistence") pod "d537dadb-d98c-4ac3-ae54-fc0a9397d7d7" (UID: "d537dadb-d98c-4ac3-ae54-fc0a9397d7d7"). InnerVolumeSpecName "local-storage04-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 03 13:55:00 crc kubenswrapper[4861]: I1003 13:55:00.903948 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/d537dadb-d98c-4ac3-ae54-fc0a9397d7d7-pod-info" (OuterVolumeSpecName: "pod-info") pod "d537dadb-d98c-4ac3-ae54-fc0a9397d7d7" (UID: "d537dadb-d98c-4ac3-ae54-fc0a9397d7d7"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Oct 03 13:55:00 crc kubenswrapper[4861]: I1003 13:55:00.924575 4861 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/d537dadb-d98c-4ac3-ae54-fc0a9397d7d7-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Oct 03 13:55:00 crc kubenswrapper[4861]: I1003 13:55:00.924606 4861 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/d537dadb-d98c-4ac3-ae54-fc0a9397d7d7-plugins-conf\") on node \"crc\" DevicePath \"\"" Oct 03 13:55:00 crc kubenswrapper[4861]: I1003 13:55:00.924615 4861 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/d537dadb-d98c-4ac3-ae54-fc0a9397d7d7-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Oct 03 13:55:00 crc kubenswrapper[4861]: I1003 13:55:00.924626 4861 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/d537dadb-d98c-4ac3-ae54-fc0a9397d7d7-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Oct 03 13:55:00 crc kubenswrapper[4861]: I1003 13:55:00.924636 4861 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/d537dadb-d98c-4ac3-ae54-fc0a9397d7d7-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Oct 03 13:55:00 crc kubenswrapper[4861]: I1003 13:55:00.924646 4861 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/d537dadb-d98c-4ac3-ae54-fc0a9397d7d7-pod-info\") on node \"crc\" DevicePath \"\"" Oct 03 13:55:00 crc kubenswrapper[4861]: I1003 13:55:00.924672 4861 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" " Oct 03 13:55:00 crc kubenswrapper[4861]: I1003 13:55:00.924682 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6fmjk\" (UniqueName: \"kubernetes.io/projected/d537dadb-d98c-4ac3-ae54-fc0a9397d7d7-kube-api-access-6fmjk\") on node \"crc\" DevicePath \"\"" Oct 03 13:55:00 crc kubenswrapper[4861]: I1003 13:55:00.970270 4861 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage04-crc" (UniqueName: "kubernetes.io/local-volume/local-storage04-crc") on node "crc" Oct 03 13:55:01 crc kubenswrapper[4861]: I1003 13:55:01.008453 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d537dadb-d98c-4ac3-ae54-fc0a9397d7d7-config-data" (OuterVolumeSpecName: "config-data") pod 
"d537dadb-d98c-4ac3-ae54-fc0a9397d7d7" (UID: "d537dadb-d98c-4ac3-ae54-fc0a9397d7d7"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:55:01 crc kubenswrapper[4861]: I1003 13:55:01.026322 4861 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d537dadb-d98c-4ac3-ae54-fc0a9397d7d7-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 13:55:01 crc kubenswrapper[4861]: I1003 13:55:01.026351 4861 reconciler_common.go:293] "Volume detached for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" DevicePath \"\"" Oct 03 13:55:01 crc kubenswrapper[4861]: I1003 13:55:01.046762 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b9bf9ee1-8038-4578-b10d-390a82c11290-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "b9bf9ee1-8038-4578-b10d-390a82c11290" (UID: "b9bf9ee1-8038-4578-b10d-390a82c11290"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:55:01 crc kubenswrapper[4861]: I1003 13:55:01.103592 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d537dadb-d98c-4ac3-ae54-fc0a9397d7d7-server-conf" (OuterVolumeSpecName: "server-conf") pod "d537dadb-d98c-4ac3-ae54-fc0a9397d7d7" (UID: "d537dadb-d98c-4ac3-ae54-fc0a9397d7d7"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:55:01 crc kubenswrapper[4861]: I1003 13:55:01.127997 4861 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/b9bf9ee1-8038-4578-b10d-390a82c11290-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Oct 03 13:55:01 crc kubenswrapper[4861]: I1003 13:55:01.128025 4861 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/d537dadb-d98c-4ac3-ae54-fc0a9397d7d7-server-conf\") on node \"crc\" DevicePath \"\"" Oct 03 13:55:01 crc kubenswrapper[4861]: I1003 13:55:01.194096 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d537dadb-d98c-4ac3-ae54-fc0a9397d7d7-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "d537dadb-d98c-4ac3-ae54-fc0a9397d7d7" (UID: "d537dadb-d98c-4ac3-ae54-fc0a9397d7d7"). InnerVolumeSpecName "rabbitmq-confd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:55:01 crc kubenswrapper[4861]: I1003 13:55:01.229778 4861 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/d537dadb-d98c-4ac3-ae54-fc0a9397d7d7-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Oct 03 13:55:01 crc kubenswrapper[4861]: I1003 13:55:01.351860 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 03 13:55:01 crc kubenswrapper[4861]: I1003 13:55:01.363131 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 03 13:55:01 crc kubenswrapper[4861]: I1003 13:55:01.390483 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Oct 03 13:55:01 crc kubenswrapper[4861]: E1003 13:55:01.391137 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b9bf9ee1-8038-4578-b10d-390a82c11290" containerName="setup-container" Oct 03 13:55:01 crc kubenswrapper[4861]: I1003 13:55:01.391280 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="b9bf9ee1-8038-4578-b10d-390a82c11290" containerName="setup-container" Oct 03 13:55:01 crc kubenswrapper[4861]: E1003 13:55:01.391368 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d537dadb-d98c-4ac3-ae54-fc0a9397d7d7" containerName="setup-container" Oct 03 13:55:01 crc kubenswrapper[4861]: I1003 13:55:01.391848 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="d537dadb-d98c-4ac3-ae54-fc0a9397d7d7" containerName="setup-container" Oct 03 13:55:01 crc kubenswrapper[4861]: E1003 13:55:01.391942 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d537dadb-d98c-4ac3-ae54-fc0a9397d7d7" containerName="rabbitmq" Oct 03 13:55:01 crc kubenswrapper[4861]: I1003 13:55:01.392015 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="d537dadb-d98c-4ac3-ae54-fc0a9397d7d7" containerName="rabbitmq" Oct 03 13:55:01 crc kubenswrapper[4861]: E1003 13:55:01.392110 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b9bf9ee1-8038-4578-b10d-390a82c11290" containerName="rabbitmq" Oct 03 13:55:01 crc kubenswrapper[4861]: I1003 13:55:01.392193 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="b9bf9ee1-8038-4578-b10d-390a82c11290" containerName="rabbitmq" Oct 03 13:55:01 crc kubenswrapper[4861]: I1003 13:55:01.392545 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="d537dadb-d98c-4ac3-ae54-fc0a9397d7d7" containerName="rabbitmq" Oct 03 13:55:01 crc kubenswrapper[4861]: I1003 13:55:01.392642 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="b9bf9ee1-8038-4578-b10d-390a82c11290" containerName="rabbitmq" Oct 03 13:55:01 crc kubenswrapper[4861]: I1003 13:55:01.394038 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 03 13:55:01 crc kubenswrapper[4861]: I1003 13:55:01.400223 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Oct 03 13:55:01 crc kubenswrapper[4861]: I1003 13:55:01.400308 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Oct 03 13:55:01 crc kubenswrapper[4861]: I1003 13:55:01.400347 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Oct 03 13:55:01 crc kubenswrapper[4861]: I1003 13:55:01.400521 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Oct 03 13:55:01 crc kubenswrapper[4861]: I1003 13:55:01.400568 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Oct 03 13:55:01 crc kubenswrapper[4861]: I1003 13:55:01.400758 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Oct 03 13:55:01 crc kubenswrapper[4861]: I1003 13:55:01.400764 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-cw2gs" Oct 03 13:55:01 crc kubenswrapper[4861]: I1003 13:55:01.409268 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 03 13:55:01 crc kubenswrapper[4861]: I1003 13:55:01.430017 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"d537dadb-d98c-4ac3-ae54-fc0a9397d7d7","Type":"ContainerDied","Data":"f1ea78a55403cb178ffafddbd1d03caff9e2ecb3c24fade79c89c4ecf8106838"} Oct 03 13:55:01 crc kubenswrapper[4861]: I1003 13:55:01.430065 4861 scope.go:117] "RemoveContainer" containerID="9d80e2d6952baf608f74f963d8e2bda4ba73dfa74ab0e13c642bc8e6396c5daf" Oct 03 13:55:01 crc kubenswrapper[4861]: I1003 13:55:01.430151 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:55:01 crc kubenswrapper[4861]: I1003 13:55:01.476400 4861 scope.go:117] "RemoveContainer" containerID="a457e9d6f265baf52dcad5b846aa53b26ec38f6a5d91ffdde9329266fe7937eb" Oct 03 13:55:01 crc kubenswrapper[4861]: I1003 13:55:01.495599 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 03 13:55:01 crc kubenswrapper[4861]: I1003 13:55:01.511804 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 03 13:55:01 crc kubenswrapper[4861]: I1003 13:55:01.526589 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 03 13:55:01 crc kubenswrapper[4861]: I1003 13:55:01.528284 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:55:01 crc kubenswrapper[4861]: I1003 13:55:01.534177 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Oct 03 13:55:01 crc kubenswrapper[4861]: I1003 13:55:01.534272 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Oct 03 13:55:01 crc kubenswrapper[4861]: I1003 13:55:01.534174 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Oct 03 13:55:01 crc kubenswrapper[4861]: I1003 13:55:01.534661 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-8zws5" Oct 03 13:55:01 crc kubenswrapper[4861]: I1003 13:55:01.535008 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Oct 03 13:55:01 crc kubenswrapper[4861]: I1003 13:55:01.535133 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Oct 03 13:55:01 crc kubenswrapper[4861]: I1003 13:55:01.535153 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Oct 03 13:55:01 crc kubenswrapper[4861]: I1003 13:55:01.535824 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/0494d758-5f63-45c6-930c-f34b43484fd9-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"0494d758-5f63-45c6-930c-f34b43484fd9\") " pod="openstack/rabbitmq-server-0" Oct 03 13:55:01 crc kubenswrapper[4861]: I1003 13:55:01.535859 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/0494d758-5f63-45c6-930c-f34b43484fd9-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"0494d758-5f63-45c6-930c-f34b43484fd9\") " pod="openstack/rabbitmq-server-0" Oct 03 13:55:01 crc kubenswrapper[4861]: I1003 13:55:01.535892 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/0494d758-5f63-45c6-930c-f34b43484fd9-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"0494d758-5f63-45c6-930c-f34b43484fd9\") " pod="openstack/rabbitmq-server-0" Oct 03 13:55:01 crc kubenswrapper[4861]: I1003 13:55:01.535925 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/0494d758-5f63-45c6-930c-f34b43484fd9-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"0494d758-5f63-45c6-930c-f34b43484fd9\") " pod="openstack/rabbitmq-server-0" Oct 03 13:55:01 crc kubenswrapper[4861]: I1003 13:55:01.535952 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/0494d758-5f63-45c6-930c-f34b43484fd9-pod-info\") pod \"rabbitmq-server-0\" (UID: \"0494d758-5f63-45c6-930c-f34b43484fd9\") " pod="openstack/rabbitmq-server-0" Oct 03 13:55:01 crc kubenswrapper[4861]: I1003 13:55:01.535974 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/0494d758-5f63-45c6-930c-f34b43484fd9-plugins-conf\") pod \"rabbitmq-server-0\" (UID: 
\"0494d758-5f63-45c6-930c-f34b43484fd9\") " pod="openstack/rabbitmq-server-0" Oct 03 13:55:01 crc kubenswrapper[4861]: I1003 13:55:01.535997 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0494d758-5f63-45c6-930c-f34b43484fd9-config-data\") pod \"rabbitmq-server-0\" (UID: \"0494d758-5f63-45c6-930c-f34b43484fd9\") " pod="openstack/rabbitmq-server-0" Oct 03 13:55:01 crc kubenswrapper[4861]: I1003 13:55:01.536014 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/0494d758-5f63-45c6-930c-f34b43484fd9-server-conf\") pod \"rabbitmq-server-0\" (UID: \"0494d758-5f63-45c6-930c-f34b43484fd9\") " pod="openstack/rabbitmq-server-0" Oct 03 13:55:01 crc kubenswrapper[4861]: I1003 13:55:01.536033 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k75pd\" (UniqueName: \"kubernetes.io/projected/0494d758-5f63-45c6-930c-f34b43484fd9-kube-api-access-k75pd\") pod \"rabbitmq-server-0\" (UID: \"0494d758-5f63-45c6-930c-f34b43484fd9\") " pod="openstack/rabbitmq-server-0" Oct 03 13:55:01 crc kubenswrapper[4861]: I1003 13:55:01.536072 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/0494d758-5f63-45c6-930c-f34b43484fd9-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"0494d758-5f63-45c6-930c-f34b43484fd9\") " pod="openstack/rabbitmq-server-0" Oct 03 13:55:01 crc kubenswrapper[4861]: I1003 13:55:01.536098 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-server-0\" (UID: \"0494d758-5f63-45c6-930c-f34b43484fd9\") " pod="openstack/rabbitmq-server-0" Oct 03 13:55:01 crc kubenswrapper[4861]: I1003 13:55:01.549834 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 03 13:55:01 crc kubenswrapper[4861]: I1003 13:55:01.637591 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-server-0\" (UID: \"0494d758-5f63-45c6-930c-f34b43484fd9\") " pod="openstack/rabbitmq-server-0" Oct 03 13:55:01 crc kubenswrapper[4861]: I1003 13:55:01.637688 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b222a9ce-46d6-4caf-b76d-f6b773276cb1-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"b222a9ce-46d6-4caf-b76d-f6b773276cb1\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:55:01 crc kubenswrapper[4861]: I1003 13:55:01.637775 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/0494d758-5f63-45c6-930c-f34b43484fd9-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"0494d758-5f63-45c6-930c-f34b43484fd9\") " pod="openstack/rabbitmq-server-0" Oct 03 13:55:01 crc kubenswrapper[4861]: I1003 13:55:01.637818 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/b222a9ce-46d6-4caf-b76d-f6b773276cb1-rabbitmq-erlang-cookie\") 
pod \"rabbitmq-cell1-server-0\" (UID: \"b222a9ce-46d6-4caf-b76d-f6b773276cb1\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:55:01 crc kubenswrapper[4861]: I1003 13:55:01.637848 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"b222a9ce-46d6-4caf-b76d-f6b773276cb1\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:55:01 crc kubenswrapper[4861]: I1003 13:55:01.637887 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/b222a9ce-46d6-4caf-b76d-f6b773276cb1-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"b222a9ce-46d6-4caf-b76d-f6b773276cb1\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:55:01 crc kubenswrapper[4861]: I1003 13:55:01.637916 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/0494d758-5f63-45c6-930c-f34b43484fd9-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"0494d758-5f63-45c6-930c-f34b43484fd9\") " pod="openstack/rabbitmq-server-0" Oct 03 13:55:01 crc kubenswrapper[4861]: I1003 13:55:01.637932 4861 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-server-0\" (UID: \"0494d758-5f63-45c6-930c-f34b43484fd9\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/rabbitmq-server-0" Oct 03 13:55:01 crc kubenswrapper[4861]: I1003 13:55:01.637947 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/b222a9ce-46d6-4caf-b76d-f6b773276cb1-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"b222a9ce-46d6-4caf-b76d-f6b773276cb1\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:55:01 crc kubenswrapper[4861]: I1003 13:55:01.637995 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/b222a9ce-46d6-4caf-b76d-f6b773276cb1-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"b222a9ce-46d6-4caf-b76d-f6b773276cb1\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:55:01 crc kubenswrapper[4861]: I1003 13:55:01.638015 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/b222a9ce-46d6-4caf-b76d-f6b773276cb1-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"b222a9ce-46d6-4caf-b76d-f6b773276cb1\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:55:01 crc kubenswrapper[4861]: I1003 13:55:01.638059 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/0494d758-5f63-45c6-930c-f34b43484fd9-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"0494d758-5f63-45c6-930c-f34b43484fd9\") " pod="openstack/rabbitmq-server-0" Oct 03 13:55:01 crc kubenswrapper[4861]: I1003 13:55:01.638084 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-drs6m\" (UniqueName: \"kubernetes.io/projected/b222a9ce-46d6-4caf-b76d-f6b773276cb1-kube-api-access-drs6m\") pod \"rabbitmq-cell1-server-0\" (UID: \"b222a9ce-46d6-4caf-b76d-f6b773276cb1\") 
" pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:55:01 crc kubenswrapper[4861]: I1003 13:55:01.638106 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/b222a9ce-46d6-4caf-b76d-f6b773276cb1-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"b222a9ce-46d6-4caf-b76d-f6b773276cb1\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:55:01 crc kubenswrapper[4861]: I1003 13:55:01.638155 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/0494d758-5f63-45c6-930c-f34b43484fd9-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"0494d758-5f63-45c6-930c-f34b43484fd9\") " pod="openstack/rabbitmq-server-0" Oct 03 13:55:01 crc kubenswrapper[4861]: I1003 13:55:01.638183 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/0494d758-5f63-45c6-930c-f34b43484fd9-pod-info\") pod \"rabbitmq-server-0\" (UID: \"0494d758-5f63-45c6-930c-f34b43484fd9\") " pod="openstack/rabbitmq-server-0" Oct 03 13:55:01 crc kubenswrapper[4861]: I1003 13:55:01.638270 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/0494d758-5f63-45c6-930c-f34b43484fd9-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"0494d758-5f63-45c6-930c-f34b43484fd9\") " pod="openstack/rabbitmq-server-0" Oct 03 13:55:01 crc kubenswrapper[4861]: I1003 13:55:01.638304 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0494d758-5f63-45c6-930c-f34b43484fd9-config-data\") pod \"rabbitmq-server-0\" (UID: \"0494d758-5f63-45c6-930c-f34b43484fd9\") " pod="openstack/rabbitmq-server-0" Oct 03 13:55:01 crc kubenswrapper[4861]: I1003 13:55:01.638348 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/0494d758-5f63-45c6-930c-f34b43484fd9-server-conf\") pod \"rabbitmq-server-0\" (UID: \"0494d758-5f63-45c6-930c-f34b43484fd9\") " pod="openstack/rabbitmq-server-0" Oct 03 13:55:01 crc kubenswrapper[4861]: I1003 13:55:01.638371 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k75pd\" (UniqueName: \"kubernetes.io/projected/0494d758-5f63-45c6-930c-f34b43484fd9-kube-api-access-k75pd\") pod \"rabbitmq-server-0\" (UID: \"0494d758-5f63-45c6-930c-f34b43484fd9\") " pod="openstack/rabbitmq-server-0" Oct 03 13:55:01 crc kubenswrapper[4861]: I1003 13:55:01.638400 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/b222a9ce-46d6-4caf-b76d-f6b773276cb1-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"b222a9ce-46d6-4caf-b76d-f6b773276cb1\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:55:01 crc kubenswrapper[4861]: I1003 13:55:01.638459 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/b222a9ce-46d6-4caf-b76d-f6b773276cb1-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"b222a9ce-46d6-4caf-b76d-f6b773276cb1\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:55:01 crc kubenswrapper[4861]: I1003 13:55:01.638507 4861 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/0494d758-5f63-45c6-930c-f34b43484fd9-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"0494d758-5f63-45c6-930c-f34b43484fd9\") " pod="openstack/rabbitmq-server-0" Oct 03 13:55:01 crc kubenswrapper[4861]: I1003 13:55:01.639486 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/0494d758-5f63-45c6-930c-f34b43484fd9-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"0494d758-5f63-45c6-930c-f34b43484fd9\") " pod="openstack/rabbitmq-server-0" Oct 03 13:55:01 crc kubenswrapper[4861]: I1003 13:55:01.639916 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/0494d758-5f63-45c6-930c-f34b43484fd9-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"0494d758-5f63-45c6-930c-f34b43484fd9\") " pod="openstack/rabbitmq-server-0" Oct 03 13:55:01 crc kubenswrapper[4861]: I1003 13:55:01.641085 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/0494d758-5f63-45c6-930c-f34b43484fd9-server-conf\") pod \"rabbitmq-server-0\" (UID: \"0494d758-5f63-45c6-930c-f34b43484fd9\") " pod="openstack/rabbitmq-server-0" Oct 03 13:55:01 crc kubenswrapper[4861]: I1003 13:55:01.641248 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/0494d758-5f63-45c6-930c-f34b43484fd9-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"0494d758-5f63-45c6-930c-f34b43484fd9\") " pod="openstack/rabbitmq-server-0" Oct 03 13:55:01 crc kubenswrapper[4861]: I1003 13:55:01.641664 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0494d758-5f63-45c6-930c-f34b43484fd9-config-data\") pod \"rabbitmq-server-0\" (UID: \"0494d758-5f63-45c6-930c-f34b43484fd9\") " pod="openstack/rabbitmq-server-0" Oct 03 13:55:01 crc kubenswrapper[4861]: I1003 13:55:01.642525 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/0494d758-5f63-45c6-930c-f34b43484fd9-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"0494d758-5f63-45c6-930c-f34b43484fd9\") " pod="openstack/rabbitmq-server-0" Oct 03 13:55:01 crc kubenswrapper[4861]: I1003 13:55:01.642883 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/0494d758-5f63-45c6-930c-f34b43484fd9-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"0494d758-5f63-45c6-930c-f34b43484fd9\") " pod="openstack/rabbitmq-server-0" Oct 03 13:55:01 crc kubenswrapper[4861]: I1003 13:55:01.643049 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/0494d758-5f63-45c6-930c-f34b43484fd9-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"0494d758-5f63-45c6-930c-f34b43484fd9\") " pod="openstack/rabbitmq-server-0" Oct 03 13:55:01 crc kubenswrapper[4861]: I1003 13:55:01.643550 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/0494d758-5f63-45c6-930c-f34b43484fd9-pod-info\") pod \"rabbitmq-server-0\" (UID: \"0494d758-5f63-45c6-930c-f34b43484fd9\") " pod="openstack/rabbitmq-server-0" Oct 03 13:55:01 crc 
kubenswrapper[4861]: I1003 13:55:01.656624 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k75pd\" (UniqueName: \"kubernetes.io/projected/0494d758-5f63-45c6-930c-f34b43484fd9-kube-api-access-k75pd\") pod \"rabbitmq-server-0\" (UID: \"0494d758-5f63-45c6-930c-f34b43484fd9\") " pod="openstack/rabbitmq-server-0" Oct 03 13:55:01 crc kubenswrapper[4861]: I1003 13:55:01.678539 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-server-0\" (UID: \"0494d758-5f63-45c6-930c-f34b43484fd9\") " pod="openstack/rabbitmq-server-0" Oct 03 13:55:01 crc kubenswrapper[4861]: I1003 13:55:01.721549 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 03 13:55:01 crc kubenswrapper[4861]: I1003 13:55:01.740271 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/b222a9ce-46d6-4caf-b76d-f6b773276cb1-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"b222a9ce-46d6-4caf-b76d-f6b773276cb1\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:55:01 crc kubenswrapper[4861]: I1003 13:55:01.740329 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/b222a9ce-46d6-4caf-b76d-f6b773276cb1-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"b222a9ce-46d6-4caf-b76d-f6b773276cb1\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:55:01 crc kubenswrapper[4861]: I1003 13:55:01.740371 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-drs6m\" (UniqueName: \"kubernetes.io/projected/b222a9ce-46d6-4caf-b76d-f6b773276cb1-kube-api-access-drs6m\") pod \"rabbitmq-cell1-server-0\" (UID: \"b222a9ce-46d6-4caf-b76d-f6b773276cb1\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:55:01 crc kubenswrapper[4861]: I1003 13:55:01.740397 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/b222a9ce-46d6-4caf-b76d-f6b773276cb1-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"b222a9ce-46d6-4caf-b76d-f6b773276cb1\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:55:01 crc kubenswrapper[4861]: I1003 13:55:01.740482 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/b222a9ce-46d6-4caf-b76d-f6b773276cb1-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"b222a9ce-46d6-4caf-b76d-f6b773276cb1\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:55:01 crc kubenswrapper[4861]: I1003 13:55:01.740537 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/b222a9ce-46d6-4caf-b76d-f6b773276cb1-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"b222a9ce-46d6-4caf-b76d-f6b773276cb1\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:55:01 crc kubenswrapper[4861]: I1003 13:55:01.740612 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b222a9ce-46d6-4caf-b76d-f6b773276cb1-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"b222a9ce-46d6-4caf-b76d-f6b773276cb1\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:55:01 crc 
kubenswrapper[4861]: I1003 13:55:01.740667 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/b222a9ce-46d6-4caf-b76d-f6b773276cb1-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"b222a9ce-46d6-4caf-b76d-f6b773276cb1\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:55:01 crc kubenswrapper[4861]: I1003 13:55:01.740695 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"b222a9ce-46d6-4caf-b76d-f6b773276cb1\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:55:01 crc kubenswrapper[4861]: I1003 13:55:01.740713 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/b222a9ce-46d6-4caf-b76d-f6b773276cb1-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"b222a9ce-46d6-4caf-b76d-f6b773276cb1\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:55:01 crc kubenswrapper[4861]: I1003 13:55:01.740759 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/b222a9ce-46d6-4caf-b76d-f6b773276cb1-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"b222a9ce-46d6-4caf-b76d-f6b773276cb1\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:55:01 crc kubenswrapper[4861]: I1003 13:55:01.741350 4861 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"b222a9ce-46d6-4caf-b76d-f6b773276cb1\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:55:01 crc kubenswrapper[4861]: I1003 13:55:01.741843 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/b222a9ce-46d6-4caf-b76d-f6b773276cb1-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"b222a9ce-46d6-4caf-b76d-f6b773276cb1\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:55:01 crc kubenswrapper[4861]: I1003 13:55:01.744846 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b222a9ce-46d6-4caf-b76d-f6b773276cb1-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"b222a9ce-46d6-4caf-b76d-f6b773276cb1\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:55:01 crc kubenswrapper[4861]: I1003 13:55:01.744905 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/b222a9ce-46d6-4caf-b76d-f6b773276cb1-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"b222a9ce-46d6-4caf-b76d-f6b773276cb1\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:55:01 crc kubenswrapper[4861]: I1003 13:55:01.745017 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/b222a9ce-46d6-4caf-b76d-f6b773276cb1-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"b222a9ce-46d6-4caf-b76d-f6b773276cb1\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:55:01 crc kubenswrapper[4861]: I1003 13:55:01.745113 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: 
\"kubernetes.io/configmap/b222a9ce-46d6-4caf-b76d-f6b773276cb1-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"b222a9ce-46d6-4caf-b76d-f6b773276cb1\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:55:01 crc kubenswrapper[4861]: I1003 13:55:01.745194 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/b222a9ce-46d6-4caf-b76d-f6b773276cb1-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"b222a9ce-46d6-4caf-b76d-f6b773276cb1\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:55:01 crc kubenswrapper[4861]: I1003 13:55:01.748624 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/b222a9ce-46d6-4caf-b76d-f6b773276cb1-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"b222a9ce-46d6-4caf-b76d-f6b773276cb1\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:55:01 crc kubenswrapper[4861]: I1003 13:55:01.749544 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/b222a9ce-46d6-4caf-b76d-f6b773276cb1-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"b222a9ce-46d6-4caf-b76d-f6b773276cb1\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:55:01 crc kubenswrapper[4861]: I1003 13:55:01.754481 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/b222a9ce-46d6-4caf-b76d-f6b773276cb1-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"b222a9ce-46d6-4caf-b76d-f6b773276cb1\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:55:01 crc kubenswrapper[4861]: I1003 13:55:01.762445 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-drs6m\" (UniqueName: \"kubernetes.io/projected/b222a9ce-46d6-4caf-b76d-f6b773276cb1-kube-api-access-drs6m\") pod \"rabbitmq-cell1-server-0\" (UID: \"b222a9ce-46d6-4caf-b76d-f6b773276cb1\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:55:01 crc kubenswrapper[4861]: I1003 13:55:01.784501 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"b222a9ce-46d6-4caf-b76d-f6b773276cb1\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:55:01 crc kubenswrapper[4861]: I1003 13:55:01.851203 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:55:02 crc kubenswrapper[4861]: I1003 13:55:02.206338 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 03 13:55:02 crc kubenswrapper[4861]: I1003 13:55:02.393145 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 03 13:55:02 crc kubenswrapper[4861]: I1003 13:55:02.453345 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"0494d758-5f63-45c6-930c-f34b43484fd9","Type":"ContainerStarted","Data":"43d7a35e9b8b4aa055fde0bb64859e467175e4deed3cf525bde2ce27c644b6d4"} Oct 03 13:55:02 crc kubenswrapper[4861]: I1003 13:55:02.455905 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"b222a9ce-46d6-4caf-b76d-f6b773276cb1","Type":"ContainerStarted","Data":"f56a4b6b70fdc9774e5607701d8f481344ee5f9825ad4a02be080c2bd674f954"} Oct 03 13:55:02 crc kubenswrapper[4861]: I1003 13:55:02.698342 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b9bf9ee1-8038-4578-b10d-390a82c11290" path="/var/lib/kubelet/pods/b9bf9ee1-8038-4578-b10d-390a82c11290/volumes" Oct 03 13:55:02 crc kubenswrapper[4861]: I1003 13:55:02.699092 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d537dadb-d98c-4ac3-ae54-fc0a9397d7d7" path="/var/lib/kubelet/pods/d537dadb-d98c-4ac3-ae54-fc0a9397d7d7/volumes" Oct 03 13:55:03 crc kubenswrapper[4861]: I1003 13:55:03.425140 4861 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-vwpt4" podUID="c9c81fcd-8cf8-4d58-8ae1-8161f6fefa5f" containerName="registry-server" probeResult="failure" output=< Oct 03 13:55:03 crc kubenswrapper[4861]: timeout: failed to connect service ":50051" within 1s Oct 03 13:55:03 crc kubenswrapper[4861]: > Oct 03 13:55:04 crc kubenswrapper[4861]: I1003 13:55:04.477544 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"b222a9ce-46d6-4caf-b76d-f6b773276cb1","Type":"ContainerStarted","Data":"7301a3c29bed9ed403397d330ac8898babfd3f26d4e4d6691cd083115783bfea"} Oct 03 13:55:04 crc kubenswrapper[4861]: I1003 13:55:04.480034 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"0494d758-5f63-45c6-930c-f34b43484fd9","Type":"ContainerStarted","Data":"6f82f7a9b3c6bf7b7cce645f8d3a5cf7dc56560ae46650ef9b2f6497e72321a1"} Oct 03 13:55:04 crc kubenswrapper[4861]: I1003 13:55:04.510411 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-67b789f86c-hc5qn"] Oct 03 13:55:04 crc kubenswrapper[4861]: I1003 13:55:04.516880 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-67b789f86c-hc5qn" Oct 03 13:55:04 crc kubenswrapper[4861]: I1003 13:55:04.522603 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-edpm-ipam" Oct 03 13:55:04 crc kubenswrapper[4861]: I1003 13:55:04.553009 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-67b789f86c-hc5qn"] Oct 03 13:55:04 crc kubenswrapper[4861]: I1003 13:55:04.592305 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7caf2b14-ce0f-4266-a844-6c507e9802ab-dns-swift-storage-0\") pod \"dnsmasq-dns-67b789f86c-hc5qn\" (UID: \"7caf2b14-ce0f-4266-a844-6c507e9802ab\") " pod="openstack/dnsmasq-dns-67b789f86c-hc5qn" Oct 03 13:55:04 crc kubenswrapper[4861]: I1003 13:55:04.593442 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7caf2b14-ce0f-4266-a844-6c507e9802ab-ovsdbserver-sb\") pod \"dnsmasq-dns-67b789f86c-hc5qn\" (UID: \"7caf2b14-ce0f-4266-a844-6c507e9802ab\") " pod="openstack/dnsmasq-dns-67b789f86c-hc5qn" Oct 03 13:55:04 crc kubenswrapper[4861]: I1003 13:55:04.593629 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7caf2b14-ce0f-4266-a844-6c507e9802ab-config\") pod \"dnsmasq-dns-67b789f86c-hc5qn\" (UID: \"7caf2b14-ce0f-4266-a844-6c507e9802ab\") " pod="openstack/dnsmasq-dns-67b789f86c-hc5qn" Oct 03 13:55:04 crc kubenswrapper[4861]: I1003 13:55:04.600432 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wcnt9\" (UniqueName: \"kubernetes.io/projected/7caf2b14-ce0f-4266-a844-6c507e9802ab-kube-api-access-wcnt9\") pod \"dnsmasq-dns-67b789f86c-hc5qn\" (UID: \"7caf2b14-ce0f-4266-a844-6c507e9802ab\") " pod="openstack/dnsmasq-dns-67b789f86c-hc5qn" Oct 03 13:55:04 crc kubenswrapper[4861]: I1003 13:55:04.600763 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/7caf2b14-ce0f-4266-a844-6c507e9802ab-openstack-edpm-ipam\") pod \"dnsmasq-dns-67b789f86c-hc5qn\" (UID: \"7caf2b14-ce0f-4266-a844-6c507e9802ab\") " pod="openstack/dnsmasq-dns-67b789f86c-hc5qn" Oct 03 13:55:04 crc kubenswrapper[4861]: I1003 13:55:04.600819 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7caf2b14-ce0f-4266-a844-6c507e9802ab-dns-svc\") pod \"dnsmasq-dns-67b789f86c-hc5qn\" (UID: \"7caf2b14-ce0f-4266-a844-6c507e9802ab\") " pod="openstack/dnsmasq-dns-67b789f86c-hc5qn" Oct 03 13:55:04 crc kubenswrapper[4861]: I1003 13:55:04.600859 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7caf2b14-ce0f-4266-a844-6c507e9802ab-ovsdbserver-nb\") pod \"dnsmasq-dns-67b789f86c-hc5qn\" (UID: \"7caf2b14-ce0f-4266-a844-6c507e9802ab\") " pod="openstack/dnsmasq-dns-67b789f86c-hc5qn" Oct 03 13:55:04 crc kubenswrapper[4861]: I1003 13:55:04.703256 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7caf2b14-ce0f-4266-a844-6c507e9802ab-dns-svc\") pod \"dnsmasq-dns-67b789f86c-hc5qn\" (UID: 
\"7caf2b14-ce0f-4266-a844-6c507e9802ab\") " pod="openstack/dnsmasq-dns-67b789f86c-hc5qn" Oct 03 13:55:04 crc kubenswrapper[4861]: I1003 13:55:04.703314 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7caf2b14-ce0f-4266-a844-6c507e9802ab-ovsdbserver-nb\") pod \"dnsmasq-dns-67b789f86c-hc5qn\" (UID: \"7caf2b14-ce0f-4266-a844-6c507e9802ab\") " pod="openstack/dnsmasq-dns-67b789f86c-hc5qn" Oct 03 13:55:04 crc kubenswrapper[4861]: I1003 13:55:04.703385 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7caf2b14-ce0f-4266-a844-6c507e9802ab-dns-swift-storage-0\") pod \"dnsmasq-dns-67b789f86c-hc5qn\" (UID: \"7caf2b14-ce0f-4266-a844-6c507e9802ab\") " pod="openstack/dnsmasq-dns-67b789f86c-hc5qn" Oct 03 13:55:04 crc kubenswrapper[4861]: I1003 13:55:04.703402 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7caf2b14-ce0f-4266-a844-6c507e9802ab-ovsdbserver-sb\") pod \"dnsmasq-dns-67b789f86c-hc5qn\" (UID: \"7caf2b14-ce0f-4266-a844-6c507e9802ab\") " pod="openstack/dnsmasq-dns-67b789f86c-hc5qn" Oct 03 13:55:04 crc kubenswrapper[4861]: I1003 13:55:04.703482 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7caf2b14-ce0f-4266-a844-6c507e9802ab-config\") pod \"dnsmasq-dns-67b789f86c-hc5qn\" (UID: \"7caf2b14-ce0f-4266-a844-6c507e9802ab\") " pod="openstack/dnsmasq-dns-67b789f86c-hc5qn" Oct 03 13:55:04 crc kubenswrapper[4861]: I1003 13:55:04.703512 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wcnt9\" (UniqueName: \"kubernetes.io/projected/7caf2b14-ce0f-4266-a844-6c507e9802ab-kube-api-access-wcnt9\") pod \"dnsmasq-dns-67b789f86c-hc5qn\" (UID: \"7caf2b14-ce0f-4266-a844-6c507e9802ab\") " pod="openstack/dnsmasq-dns-67b789f86c-hc5qn" Oct 03 13:55:04 crc kubenswrapper[4861]: I1003 13:55:04.703614 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/7caf2b14-ce0f-4266-a844-6c507e9802ab-openstack-edpm-ipam\") pod \"dnsmasq-dns-67b789f86c-hc5qn\" (UID: \"7caf2b14-ce0f-4266-a844-6c507e9802ab\") " pod="openstack/dnsmasq-dns-67b789f86c-hc5qn" Oct 03 13:55:04 crc kubenswrapper[4861]: I1003 13:55:04.704354 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/7caf2b14-ce0f-4266-a844-6c507e9802ab-openstack-edpm-ipam\") pod \"dnsmasq-dns-67b789f86c-hc5qn\" (UID: \"7caf2b14-ce0f-4266-a844-6c507e9802ab\") " pod="openstack/dnsmasq-dns-67b789f86c-hc5qn" Oct 03 13:55:04 crc kubenswrapper[4861]: I1003 13:55:04.705157 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7caf2b14-ce0f-4266-a844-6c507e9802ab-ovsdbserver-nb\") pod \"dnsmasq-dns-67b789f86c-hc5qn\" (UID: \"7caf2b14-ce0f-4266-a844-6c507e9802ab\") " pod="openstack/dnsmasq-dns-67b789f86c-hc5qn" Oct 03 13:55:04 crc kubenswrapper[4861]: I1003 13:55:04.705442 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7caf2b14-ce0f-4266-a844-6c507e9802ab-dns-swift-storage-0\") pod \"dnsmasq-dns-67b789f86c-hc5qn\" (UID: 
\"7caf2b14-ce0f-4266-a844-6c507e9802ab\") " pod="openstack/dnsmasq-dns-67b789f86c-hc5qn" Oct 03 13:55:04 crc kubenswrapper[4861]: I1003 13:55:04.705532 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7caf2b14-ce0f-4266-a844-6c507e9802ab-dns-svc\") pod \"dnsmasq-dns-67b789f86c-hc5qn\" (UID: \"7caf2b14-ce0f-4266-a844-6c507e9802ab\") " pod="openstack/dnsmasq-dns-67b789f86c-hc5qn" Oct 03 13:55:04 crc kubenswrapper[4861]: I1003 13:55:04.705600 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7caf2b14-ce0f-4266-a844-6c507e9802ab-ovsdbserver-sb\") pod \"dnsmasq-dns-67b789f86c-hc5qn\" (UID: \"7caf2b14-ce0f-4266-a844-6c507e9802ab\") " pod="openstack/dnsmasq-dns-67b789f86c-hc5qn" Oct 03 13:55:04 crc kubenswrapper[4861]: I1003 13:55:04.705925 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7caf2b14-ce0f-4266-a844-6c507e9802ab-config\") pod \"dnsmasq-dns-67b789f86c-hc5qn\" (UID: \"7caf2b14-ce0f-4266-a844-6c507e9802ab\") " pod="openstack/dnsmasq-dns-67b789f86c-hc5qn" Oct 03 13:55:04 crc kubenswrapper[4861]: I1003 13:55:04.737200 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wcnt9\" (UniqueName: \"kubernetes.io/projected/7caf2b14-ce0f-4266-a844-6c507e9802ab-kube-api-access-wcnt9\") pod \"dnsmasq-dns-67b789f86c-hc5qn\" (UID: \"7caf2b14-ce0f-4266-a844-6c507e9802ab\") " pod="openstack/dnsmasq-dns-67b789f86c-hc5qn" Oct 03 13:55:04 crc kubenswrapper[4861]: I1003 13:55:04.852323 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-67b789f86c-hc5qn" Oct 03 13:55:05 crc kubenswrapper[4861]: I1003 13:55:05.346978 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-67b789f86c-hc5qn"] Oct 03 13:55:05 crc kubenswrapper[4861]: I1003 13:55:05.491182 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67b789f86c-hc5qn" event={"ID":"7caf2b14-ce0f-4266-a844-6c507e9802ab","Type":"ContainerStarted","Data":"7cbb0bbb8835659a47f4a4800fe26325332d66f8eb6b5f29049d3447cb17fc2b"} Oct 03 13:55:06 crc kubenswrapper[4861]: I1003 13:55:06.499604 4861 generic.go:334] "Generic (PLEG): container finished" podID="7caf2b14-ce0f-4266-a844-6c507e9802ab" containerID="aa927244878d3e3dd97cc14b01e6b2255fdcd9fbd7cd4d6dc9b46b8576ce4703" exitCode=0 Oct 03 13:55:06 crc kubenswrapper[4861]: I1003 13:55:06.499655 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67b789f86c-hc5qn" event={"ID":"7caf2b14-ce0f-4266-a844-6c507e9802ab","Type":"ContainerDied","Data":"aa927244878d3e3dd97cc14b01e6b2255fdcd9fbd7cd4d6dc9b46b8576ce4703"} Oct 03 13:55:07 crc kubenswrapper[4861]: I1003 13:55:07.510010 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67b789f86c-hc5qn" event={"ID":"7caf2b14-ce0f-4266-a844-6c507e9802ab","Type":"ContainerStarted","Data":"177183e143e25882621c655b779e010012aa165663d6ca812d44afb21249e3b8"} Oct 03 13:55:07 crc kubenswrapper[4861]: I1003 13:55:07.510448 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-67b789f86c-hc5qn" Oct 03 13:55:07 crc kubenswrapper[4861]: I1003 13:55:07.534411 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-67b789f86c-hc5qn" podStartSLOduration=3.534395198 
podStartE2EDuration="3.534395198s" podCreationTimestamp="2025-10-03 13:55:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:55:07.53104128 +0000 UTC m=+1421.529026347" watchObservedRunningTime="2025-10-03 13:55:07.534395198 +0000 UTC m=+1421.532380245" Oct 03 13:55:12 crc kubenswrapper[4861]: I1003 13:55:12.399859 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-vwpt4" Oct 03 13:55:12 crc kubenswrapper[4861]: I1003 13:55:12.478635 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-vwpt4" Oct 03 13:55:12 crc kubenswrapper[4861]: I1003 13:55:12.657457 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-vwpt4"] Oct 03 13:55:13 crc kubenswrapper[4861]: I1003 13:55:13.559884 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-vwpt4" podUID="c9c81fcd-8cf8-4d58-8ae1-8161f6fefa5f" containerName="registry-server" containerID="cri-o://25a27b662ecc16f194e71a0aa50777f553b648b24484f2f11dd85ce4904f2059" gracePeriod=2 Oct 03 13:55:14 crc kubenswrapper[4861]: I1003 13:55:14.038753 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-vwpt4" Oct 03 13:55:14 crc kubenswrapper[4861]: I1003 13:55:14.211545 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c9c81fcd-8cf8-4d58-8ae1-8161f6fefa5f-utilities\") pod \"c9c81fcd-8cf8-4d58-8ae1-8161f6fefa5f\" (UID: \"c9c81fcd-8cf8-4d58-8ae1-8161f6fefa5f\") " Oct 03 13:55:14 crc kubenswrapper[4861]: I1003 13:55:14.211608 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f4x26\" (UniqueName: \"kubernetes.io/projected/c9c81fcd-8cf8-4d58-8ae1-8161f6fefa5f-kube-api-access-f4x26\") pod \"c9c81fcd-8cf8-4d58-8ae1-8161f6fefa5f\" (UID: \"c9c81fcd-8cf8-4d58-8ae1-8161f6fefa5f\") " Oct 03 13:55:14 crc kubenswrapper[4861]: I1003 13:55:14.211847 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c9c81fcd-8cf8-4d58-8ae1-8161f6fefa5f-catalog-content\") pod \"c9c81fcd-8cf8-4d58-8ae1-8161f6fefa5f\" (UID: \"c9c81fcd-8cf8-4d58-8ae1-8161f6fefa5f\") " Oct 03 13:55:14 crc kubenswrapper[4861]: I1003 13:55:14.212851 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c9c81fcd-8cf8-4d58-8ae1-8161f6fefa5f-utilities" (OuterVolumeSpecName: "utilities") pod "c9c81fcd-8cf8-4d58-8ae1-8161f6fefa5f" (UID: "c9c81fcd-8cf8-4d58-8ae1-8161f6fefa5f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:55:14 crc kubenswrapper[4861]: I1003 13:55:14.218197 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c9c81fcd-8cf8-4d58-8ae1-8161f6fefa5f-kube-api-access-f4x26" (OuterVolumeSpecName: "kube-api-access-f4x26") pod "c9c81fcd-8cf8-4d58-8ae1-8161f6fefa5f" (UID: "c9c81fcd-8cf8-4d58-8ae1-8161f6fefa5f"). InnerVolumeSpecName "kube-api-access-f4x26". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:55:14 crc kubenswrapper[4861]: I1003 13:55:14.290468 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c9c81fcd-8cf8-4d58-8ae1-8161f6fefa5f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c9c81fcd-8cf8-4d58-8ae1-8161f6fefa5f" (UID: "c9c81fcd-8cf8-4d58-8ae1-8161f6fefa5f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:55:14 crc kubenswrapper[4861]: I1003 13:55:14.314689 4861 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c9c81fcd-8cf8-4d58-8ae1-8161f6fefa5f-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 13:55:14 crc kubenswrapper[4861]: I1003 13:55:14.314730 4861 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c9c81fcd-8cf8-4d58-8ae1-8161f6fefa5f-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 13:55:14 crc kubenswrapper[4861]: I1003 13:55:14.314742 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f4x26\" (UniqueName: \"kubernetes.io/projected/c9c81fcd-8cf8-4d58-8ae1-8161f6fefa5f-kube-api-access-f4x26\") on node \"crc\" DevicePath \"\"" Oct 03 13:55:14 crc kubenswrapper[4861]: I1003 13:55:14.572651 4861 generic.go:334] "Generic (PLEG): container finished" podID="c9c81fcd-8cf8-4d58-8ae1-8161f6fefa5f" containerID="25a27b662ecc16f194e71a0aa50777f553b648b24484f2f11dd85ce4904f2059" exitCode=0 Oct 03 13:55:14 crc kubenswrapper[4861]: I1003 13:55:14.572707 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vwpt4" event={"ID":"c9c81fcd-8cf8-4d58-8ae1-8161f6fefa5f","Type":"ContainerDied","Data":"25a27b662ecc16f194e71a0aa50777f553b648b24484f2f11dd85ce4904f2059"} Oct 03 13:55:14 crc kubenswrapper[4861]: I1003 13:55:14.572763 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vwpt4" event={"ID":"c9c81fcd-8cf8-4d58-8ae1-8161f6fefa5f","Type":"ContainerDied","Data":"4f8b8d02a231a8e1ef15a935ecbe2c1a53e65edaa814918115a8f568278ff7f0"} Oct 03 13:55:14 crc kubenswrapper[4861]: I1003 13:55:14.572790 4861 scope.go:117] "RemoveContainer" containerID="25a27b662ecc16f194e71a0aa50777f553b648b24484f2f11dd85ce4904f2059" Oct 03 13:55:14 crc kubenswrapper[4861]: I1003 13:55:14.574340 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-vwpt4" Oct 03 13:55:14 crc kubenswrapper[4861]: I1003 13:55:14.611524 4861 scope.go:117] "RemoveContainer" containerID="afe861561a1444d45de9bf356815d9f343e51ad97ff92041d5510e70aab88e77" Oct 03 13:55:14 crc kubenswrapper[4861]: I1003 13:55:14.623464 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-vwpt4"] Oct 03 13:55:14 crc kubenswrapper[4861]: I1003 13:55:14.632578 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-vwpt4"] Oct 03 13:55:14 crc kubenswrapper[4861]: I1003 13:55:14.640201 4861 scope.go:117] "RemoveContainer" containerID="b16c0dbcc35a81f6ea07c484b032653c5effa47c058dab49be59318e79f86de7" Oct 03 13:55:14 crc kubenswrapper[4861]: I1003 13:55:14.678938 4861 scope.go:117] "RemoveContainer" containerID="25a27b662ecc16f194e71a0aa50777f553b648b24484f2f11dd85ce4904f2059" Oct 03 13:55:14 crc kubenswrapper[4861]: E1003 13:55:14.679571 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"25a27b662ecc16f194e71a0aa50777f553b648b24484f2f11dd85ce4904f2059\": container with ID starting with 25a27b662ecc16f194e71a0aa50777f553b648b24484f2f11dd85ce4904f2059 not found: ID does not exist" containerID="25a27b662ecc16f194e71a0aa50777f553b648b24484f2f11dd85ce4904f2059" Oct 03 13:55:14 crc kubenswrapper[4861]: I1003 13:55:14.679603 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"25a27b662ecc16f194e71a0aa50777f553b648b24484f2f11dd85ce4904f2059"} err="failed to get container status \"25a27b662ecc16f194e71a0aa50777f553b648b24484f2f11dd85ce4904f2059\": rpc error: code = NotFound desc = could not find container \"25a27b662ecc16f194e71a0aa50777f553b648b24484f2f11dd85ce4904f2059\": container with ID starting with 25a27b662ecc16f194e71a0aa50777f553b648b24484f2f11dd85ce4904f2059 not found: ID does not exist" Oct 03 13:55:14 crc kubenswrapper[4861]: I1003 13:55:14.679622 4861 scope.go:117] "RemoveContainer" containerID="afe861561a1444d45de9bf356815d9f343e51ad97ff92041d5510e70aab88e77" Oct 03 13:55:14 crc kubenswrapper[4861]: E1003 13:55:14.679858 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"afe861561a1444d45de9bf356815d9f343e51ad97ff92041d5510e70aab88e77\": container with ID starting with afe861561a1444d45de9bf356815d9f343e51ad97ff92041d5510e70aab88e77 not found: ID does not exist" containerID="afe861561a1444d45de9bf356815d9f343e51ad97ff92041d5510e70aab88e77" Oct 03 13:55:14 crc kubenswrapper[4861]: I1003 13:55:14.679876 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"afe861561a1444d45de9bf356815d9f343e51ad97ff92041d5510e70aab88e77"} err="failed to get container status \"afe861561a1444d45de9bf356815d9f343e51ad97ff92041d5510e70aab88e77\": rpc error: code = NotFound desc = could not find container \"afe861561a1444d45de9bf356815d9f343e51ad97ff92041d5510e70aab88e77\": container with ID starting with afe861561a1444d45de9bf356815d9f343e51ad97ff92041d5510e70aab88e77 not found: ID does not exist" Oct 03 13:55:14 crc kubenswrapper[4861]: I1003 13:55:14.679889 4861 scope.go:117] "RemoveContainer" containerID="b16c0dbcc35a81f6ea07c484b032653c5effa47c058dab49be59318e79f86de7" Oct 03 13:55:14 crc kubenswrapper[4861]: E1003 13:55:14.680152 4861 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"b16c0dbcc35a81f6ea07c484b032653c5effa47c058dab49be59318e79f86de7\": container with ID starting with b16c0dbcc35a81f6ea07c484b032653c5effa47c058dab49be59318e79f86de7 not found: ID does not exist" containerID="b16c0dbcc35a81f6ea07c484b032653c5effa47c058dab49be59318e79f86de7" Oct 03 13:55:14 crc kubenswrapper[4861]: I1003 13:55:14.680186 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b16c0dbcc35a81f6ea07c484b032653c5effa47c058dab49be59318e79f86de7"} err="failed to get container status \"b16c0dbcc35a81f6ea07c484b032653c5effa47c058dab49be59318e79f86de7\": rpc error: code = NotFound desc = could not find container \"b16c0dbcc35a81f6ea07c484b032653c5effa47c058dab49be59318e79f86de7\": container with ID starting with b16c0dbcc35a81f6ea07c484b032653c5effa47c058dab49be59318e79f86de7 not found: ID does not exist" Oct 03 13:55:14 crc kubenswrapper[4861]: I1003 13:55:14.694925 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c9c81fcd-8cf8-4d58-8ae1-8161f6fefa5f" path="/var/lib/kubelet/pods/c9c81fcd-8cf8-4d58-8ae1-8161f6fefa5f/volumes" Oct 03 13:55:14 crc kubenswrapper[4861]: I1003 13:55:14.854329 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-67b789f86c-hc5qn" Oct 03 13:55:14 crc kubenswrapper[4861]: I1003 13:55:14.925136 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-59cf4bdb65-d4jfw"] Oct 03 13:55:14 crc kubenswrapper[4861]: I1003 13:55:14.926598 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-59cf4bdb65-d4jfw" podUID="2c9a4578-b9f0-4eba-a774-580a69276809" containerName="dnsmasq-dns" containerID="cri-o://e7ce84b2ad4affdb28e3ce0b262409f7e3dfa027dbb6ca09de7efcfa4c0982a9" gracePeriod=10 Oct 03 13:55:15 crc kubenswrapper[4861]: I1003 13:55:15.095665 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-79dc84bdb7-xjmx4"] Oct 03 13:55:15 crc kubenswrapper[4861]: E1003 13:55:15.096035 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c9c81fcd-8cf8-4d58-8ae1-8161f6fefa5f" containerName="extract-content" Oct 03 13:55:15 crc kubenswrapper[4861]: I1003 13:55:15.096051 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="c9c81fcd-8cf8-4d58-8ae1-8161f6fefa5f" containerName="extract-content" Oct 03 13:55:15 crc kubenswrapper[4861]: E1003 13:55:15.096087 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c9c81fcd-8cf8-4d58-8ae1-8161f6fefa5f" containerName="extract-utilities" Oct 03 13:55:15 crc kubenswrapper[4861]: I1003 13:55:15.096094 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="c9c81fcd-8cf8-4d58-8ae1-8161f6fefa5f" containerName="extract-utilities" Oct 03 13:55:15 crc kubenswrapper[4861]: E1003 13:55:15.096112 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c9c81fcd-8cf8-4d58-8ae1-8161f6fefa5f" containerName="registry-server" Oct 03 13:55:15 crc kubenswrapper[4861]: I1003 13:55:15.096118 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="c9c81fcd-8cf8-4d58-8ae1-8161f6fefa5f" containerName="registry-server" Oct 03 13:55:15 crc kubenswrapper[4861]: I1003 13:55:15.096332 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="c9c81fcd-8cf8-4d58-8ae1-8161f6fefa5f" containerName="registry-server" Oct 03 13:55:15 crc kubenswrapper[4861]: I1003 13:55:15.097299 4861 util.go:30] "No sandbox for pod can be 
found. Need to start a new one" pod="openstack/dnsmasq-dns-79dc84bdb7-xjmx4" Oct 03 13:55:15 crc kubenswrapper[4861]: I1003 13:55:15.119598 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-79dc84bdb7-xjmx4"] Oct 03 13:55:15 crc kubenswrapper[4861]: I1003 13:55:15.231790 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/f6f37303-3f4a-44b7-aef2-ed92a6c277e2-openstack-edpm-ipam\") pod \"dnsmasq-dns-79dc84bdb7-xjmx4\" (UID: \"f6f37303-3f4a-44b7-aef2-ed92a6c277e2\") " pod="openstack/dnsmasq-dns-79dc84bdb7-xjmx4" Oct 03 13:55:15 crc kubenswrapper[4861]: I1003 13:55:15.231832 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f6f37303-3f4a-44b7-aef2-ed92a6c277e2-ovsdbserver-nb\") pod \"dnsmasq-dns-79dc84bdb7-xjmx4\" (UID: \"f6f37303-3f4a-44b7-aef2-ed92a6c277e2\") " pod="openstack/dnsmasq-dns-79dc84bdb7-xjmx4" Oct 03 13:55:15 crc kubenswrapper[4861]: I1003 13:55:15.231887 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f6f37303-3f4a-44b7-aef2-ed92a6c277e2-config\") pod \"dnsmasq-dns-79dc84bdb7-xjmx4\" (UID: \"f6f37303-3f4a-44b7-aef2-ed92a6c277e2\") " pod="openstack/dnsmasq-dns-79dc84bdb7-xjmx4" Oct 03 13:55:15 crc kubenswrapper[4861]: I1003 13:55:15.231912 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f6f37303-3f4a-44b7-aef2-ed92a6c277e2-dns-svc\") pod \"dnsmasq-dns-79dc84bdb7-xjmx4\" (UID: \"f6f37303-3f4a-44b7-aef2-ed92a6c277e2\") " pod="openstack/dnsmasq-dns-79dc84bdb7-xjmx4" Oct 03 13:55:15 crc kubenswrapper[4861]: I1003 13:55:15.231967 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f6f37303-3f4a-44b7-aef2-ed92a6c277e2-ovsdbserver-sb\") pod \"dnsmasq-dns-79dc84bdb7-xjmx4\" (UID: \"f6f37303-3f4a-44b7-aef2-ed92a6c277e2\") " pod="openstack/dnsmasq-dns-79dc84bdb7-xjmx4" Oct 03 13:55:15 crc kubenswrapper[4861]: I1003 13:55:15.232062 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nvk4f\" (UniqueName: \"kubernetes.io/projected/f6f37303-3f4a-44b7-aef2-ed92a6c277e2-kube-api-access-nvk4f\") pod \"dnsmasq-dns-79dc84bdb7-xjmx4\" (UID: \"f6f37303-3f4a-44b7-aef2-ed92a6c277e2\") " pod="openstack/dnsmasq-dns-79dc84bdb7-xjmx4" Oct 03 13:55:15 crc kubenswrapper[4861]: I1003 13:55:15.232186 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f6f37303-3f4a-44b7-aef2-ed92a6c277e2-dns-swift-storage-0\") pod \"dnsmasq-dns-79dc84bdb7-xjmx4\" (UID: \"f6f37303-3f4a-44b7-aef2-ed92a6c277e2\") " pod="openstack/dnsmasq-dns-79dc84bdb7-xjmx4" Oct 03 13:55:15 crc kubenswrapper[4861]: I1003 13:55:15.343831 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f6f37303-3f4a-44b7-aef2-ed92a6c277e2-dns-swift-storage-0\") pod \"dnsmasq-dns-79dc84bdb7-xjmx4\" (UID: \"f6f37303-3f4a-44b7-aef2-ed92a6c277e2\") " pod="openstack/dnsmasq-dns-79dc84bdb7-xjmx4" Oct 03 13:55:15 crc kubenswrapper[4861]: I1003 
13:55:15.343922 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f6f37303-3f4a-44b7-aef2-ed92a6c277e2-ovsdbserver-nb\") pod \"dnsmasq-dns-79dc84bdb7-xjmx4\" (UID: \"f6f37303-3f4a-44b7-aef2-ed92a6c277e2\") " pod="openstack/dnsmasq-dns-79dc84bdb7-xjmx4" Oct 03 13:55:15 crc kubenswrapper[4861]: I1003 13:55:15.343951 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/f6f37303-3f4a-44b7-aef2-ed92a6c277e2-openstack-edpm-ipam\") pod \"dnsmasq-dns-79dc84bdb7-xjmx4\" (UID: \"f6f37303-3f4a-44b7-aef2-ed92a6c277e2\") " pod="openstack/dnsmasq-dns-79dc84bdb7-xjmx4" Oct 03 13:55:15 crc kubenswrapper[4861]: I1003 13:55:15.343996 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f6f37303-3f4a-44b7-aef2-ed92a6c277e2-config\") pod \"dnsmasq-dns-79dc84bdb7-xjmx4\" (UID: \"f6f37303-3f4a-44b7-aef2-ed92a6c277e2\") " pod="openstack/dnsmasq-dns-79dc84bdb7-xjmx4" Oct 03 13:55:15 crc kubenswrapper[4861]: I1003 13:55:15.344018 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f6f37303-3f4a-44b7-aef2-ed92a6c277e2-dns-svc\") pod \"dnsmasq-dns-79dc84bdb7-xjmx4\" (UID: \"f6f37303-3f4a-44b7-aef2-ed92a6c277e2\") " pod="openstack/dnsmasq-dns-79dc84bdb7-xjmx4" Oct 03 13:55:15 crc kubenswrapper[4861]: I1003 13:55:15.344065 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f6f37303-3f4a-44b7-aef2-ed92a6c277e2-ovsdbserver-sb\") pod \"dnsmasq-dns-79dc84bdb7-xjmx4\" (UID: \"f6f37303-3f4a-44b7-aef2-ed92a6c277e2\") " pod="openstack/dnsmasq-dns-79dc84bdb7-xjmx4" Oct 03 13:55:15 crc kubenswrapper[4861]: I1003 13:55:15.344144 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nvk4f\" (UniqueName: \"kubernetes.io/projected/f6f37303-3f4a-44b7-aef2-ed92a6c277e2-kube-api-access-nvk4f\") pod \"dnsmasq-dns-79dc84bdb7-xjmx4\" (UID: \"f6f37303-3f4a-44b7-aef2-ed92a6c277e2\") " pod="openstack/dnsmasq-dns-79dc84bdb7-xjmx4" Oct 03 13:55:15 crc kubenswrapper[4861]: I1003 13:55:15.344957 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f6f37303-3f4a-44b7-aef2-ed92a6c277e2-dns-swift-storage-0\") pod \"dnsmasq-dns-79dc84bdb7-xjmx4\" (UID: \"f6f37303-3f4a-44b7-aef2-ed92a6c277e2\") " pod="openstack/dnsmasq-dns-79dc84bdb7-xjmx4" Oct 03 13:55:15 crc kubenswrapper[4861]: I1003 13:55:15.345213 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/f6f37303-3f4a-44b7-aef2-ed92a6c277e2-openstack-edpm-ipam\") pod \"dnsmasq-dns-79dc84bdb7-xjmx4\" (UID: \"f6f37303-3f4a-44b7-aef2-ed92a6c277e2\") " pod="openstack/dnsmasq-dns-79dc84bdb7-xjmx4" Oct 03 13:55:15 crc kubenswrapper[4861]: I1003 13:55:15.345400 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f6f37303-3f4a-44b7-aef2-ed92a6c277e2-dns-svc\") pod \"dnsmasq-dns-79dc84bdb7-xjmx4\" (UID: \"f6f37303-3f4a-44b7-aef2-ed92a6c277e2\") " pod="openstack/dnsmasq-dns-79dc84bdb7-xjmx4" Oct 03 13:55:15 crc kubenswrapper[4861]: I1003 13:55:15.347857 4861 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f6f37303-3f4a-44b7-aef2-ed92a6c277e2-config\") pod \"dnsmasq-dns-79dc84bdb7-xjmx4\" (UID: \"f6f37303-3f4a-44b7-aef2-ed92a6c277e2\") " pod="openstack/dnsmasq-dns-79dc84bdb7-xjmx4" Oct 03 13:55:15 crc kubenswrapper[4861]: I1003 13:55:15.348591 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f6f37303-3f4a-44b7-aef2-ed92a6c277e2-ovsdbserver-nb\") pod \"dnsmasq-dns-79dc84bdb7-xjmx4\" (UID: \"f6f37303-3f4a-44b7-aef2-ed92a6c277e2\") " pod="openstack/dnsmasq-dns-79dc84bdb7-xjmx4" Oct 03 13:55:15 crc kubenswrapper[4861]: I1003 13:55:15.349203 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f6f37303-3f4a-44b7-aef2-ed92a6c277e2-ovsdbserver-sb\") pod \"dnsmasq-dns-79dc84bdb7-xjmx4\" (UID: \"f6f37303-3f4a-44b7-aef2-ed92a6c277e2\") " pod="openstack/dnsmasq-dns-79dc84bdb7-xjmx4" Oct 03 13:55:15 crc kubenswrapper[4861]: I1003 13:55:15.379064 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nvk4f\" (UniqueName: \"kubernetes.io/projected/f6f37303-3f4a-44b7-aef2-ed92a6c277e2-kube-api-access-nvk4f\") pod \"dnsmasq-dns-79dc84bdb7-xjmx4\" (UID: \"f6f37303-3f4a-44b7-aef2-ed92a6c277e2\") " pod="openstack/dnsmasq-dns-79dc84bdb7-xjmx4" Oct 03 13:55:15 crc kubenswrapper[4861]: I1003 13:55:15.418404 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-79dc84bdb7-xjmx4" Oct 03 13:55:15 crc kubenswrapper[4861]: I1003 13:55:15.567651 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-59cf4bdb65-d4jfw" Oct 03 13:55:15 crc kubenswrapper[4861]: I1003 13:55:15.604947 4861 generic.go:334] "Generic (PLEG): container finished" podID="2c9a4578-b9f0-4eba-a774-580a69276809" containerID="e7ce84b2ad4affdb28e3ce0b262409f7e3dfa027dbb6ca09de7efcfa4c0982a9" exitCode=0 Oct 03 13:55:15 crc kubenswrapper[4861]: I1003 13:55:15.604977 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59cf4bdb65-d4jfw" event={"ID":"2c9a4578-b9f0-4eba-a774-580a69276809","Type":"ContainerDied","Data":"e7ce84b2ad4affdb28e3ce0b262409f7e3dfa027dbb6ca09de7efcfa4c0982a9"} Oct 03 13:55:15 crc kubenswrapper[4861]: I1003 13:55:15.604993 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-59cf4bdb65-d4jfw" Oct 03 13:55:15 crc kubenswrapper[4861]: I1003 13:55:15.604999 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59cf4bdb65-d4jfw" event={"ID":"2c9a4578-b9f0-4eba-a774-580a69276809","Type":"ContainerDied","Data":"0cdb23e1d25c734e0bb8ed8b99504e9dce28da5b0bae88380933b263dff703d3"} Oct 03 13:55:15 crc kubenswrapper[4861]: I1003 13:55:15.605016 4861 scope.go:117] "RemoveContainer" containerID="e7ce84b2ad4affdb28e3ce0b262409f7e3dfa027dbb6ca09de7efcfa4c0982a9" Oct 03 13:55:15 crc kubenswrapper[4861]: I1003 13:55:15.634960 4861 scope.go:117] "RemoveContainer" containerID="4c189bd844513e6bacf4e4b38f037a49fbb05e130e1e2b1b9eb2a08793d2645a" Oct 03 13:55:15 crc kubenswrapper[4861]: I1003 13:55:15.660084 4861 scope.go:117] "RemoveContainer" containerID="e7ce84b2ad4affdb28e3ce0b262409f7e3dfa027dbb6ca09de7efcfa4c0982a9" Oct 03 13:55:15 crc kubenswrapper[4861]: E1003 13:55:15.660830 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e7ce84b2ad4affdb28e3ce0b262409f7e3dfa027dbb6ca09de7efcfa4c0982a9\": container with ID starting with e7ce84b2ad4affdb28e3ce0b262409f7e3dfa027dbb6ca09de7efcfa4c0982a9 not found: ID does not exist" containerID="e7ce84b2ad4affdb28e3ce0b262409f7e3dfa027dbb6ca09de7efcfa4c0982a9" Oct 03 13:55:15 crc kubenswrapper[4861]: I1003 13:55:15.660874 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e7ce84b2ad4affdb28e3ce0b262409f7e3dfa027dbb6ca09de7efcfa4c0982a9"} err="failed to get container status \"e7ce84b2ad4affdb28e3ce0b262409f7e3dfa027dbb6ca09de7efcfa4c0982a9\": rpc error: code = NotFound desc = could not find container \"e7ce84b2ad4affdb28e3ce0b262409f7e3dfa027dbb6ca09de7efcfa4c0982a9\": container with ID starting with e7ce84b2ad4affdb28e3ce0b262409f7e3dfa027dbb6ca09de7efcfa4c0982a9 not found: ID does not exist" Oct 03 13:55:15 crc kubenswrapper[4861]: I1003 13:55:15.660905 4861 scope.go:117] "RemoveContainer" containerID="4c189bd844513e6bacf4e4b38f037a49fbb05e130e1e2b1b9eb2a08793d2645a" Oct 03 13:55:15 crc kubenswrapper[4861]: E1003 13:55:15.661376 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4c189bd844513e6bacf4e4b38f037a49fbb05e130e1e2b1b9eb2a08793d2645a\": container with ID starting with 4c189bd844513e6bacf4e4b38f037a49fbb05e130e1e2b1b9eb2a08793d2645a not found: ID does not exist" containerID="4c189bd844513e6bacf4e4b38f037a49fbb05e130e1e2b1b9eb2a08793d2645a" Oct 03 13:55:15 crc kubenswrapper[4861]: I1003 13:55:15.661437 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4c189bd844513e6bacf4e4b38f037a49fbb05e130e1e2b1b9eb2a08793d2645a"} err="failed to get container status \"4c189bd844513e6bacf4e4b38f037a49fbb05e130e1e2b1b9eb2a08793d2645a\": rpc error: code = NotFound desc = could not find container \"4c189bd844513e6bacf4e4b38f037a49fbb05e130e1e2b1b9eb2a08793d2645a\": container with ID starting with 4c189bd844513e6bacf4e4b38f037a49fbb05e130e1e2b1b9eb2a08793d2645a not found: ID does not exist" Oct 03 13:55:15 crc kubenswrapper[4861]: I1003 13:55:15.752975 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2c9a4578-b9f0-4eba-a774-580a69276809-dns-swift-storage-0\") pod \"2c9a4578-b9f0-4eba-a774-580a69276809\" (UID: 
\"2c9a4578-b9f0-4eba-a774-580a69276809\") " Oct 03 13:55:15 crc kubenswrapper[4861]: I1003 13:55:15.753188 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g5zhl\" (UniqueName: \"kubernetes.io/projected/2c9a4578-b9f0-4eba-a774-580a69276809-kube-api-access-g5zhl\") pod \"2c9a4578-b9f0-4eba-a774-580a69276809\" (UID: \"2c9a4578-b9f0-4eba-a774-580a69276809\") " Oct 03 13:55:15 crc kubenswrapper[4861]: I1003 13:55:15.753217 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2c9a4578-b9f0-4eba-a774-580a69276809-ovsdbserver-nb\") pod \"2c9a4578-b9f0-4eba-a774-580a69276809\" (UID: \"2c9a4578-b9f0-4eba-a774-580a69276809\") " Oct 03 13:55:15 crc kubenswrapper[4861]: I1003 13:55:15.753297 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2c9a4578-b9f0-4eba-a774-580a69276809-config\") pod \"2c9a4578-b9f0-4eba-a774-580a69276809\" (UID: \"2c9a4578-b9f0-4eba-a774-580a69276809\") " Oct 03 13:55:15 crc kubenswrapper[4861]: I1003 13:55:15.753354 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2c9a4578-b9f0-4eba-a774-580a69276809-ovsdbserver-sb\") pod \"2c9a4578-b9f0-4eba-a774-580a69276809\" (UID: \"2c9a4578-b9f0-4eba-a774-580a69276809\") " Oct 03 13:55:15 crc kubenswrapper[4861]: I1003 13:55:15.753391 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2c9a4578-b9f0-4eba-a774-580a69276809-dns-svc\") pod \"2c9a4578-b9f0-4eba-a774-580a69276809\" (UID: \"2c9a4578-b9f0-4eba-a774-580a69276809\") " Oct 03 13:55:15 crc kubenswrapper[4861]: I1003 13:55:15.756939 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2c9a4578-b9f0-4eba-a774-580a69276809-kube-api-access-g5zhl" (OuterVolumeSpecName: "kube-api-access-g5zhl") pod "2c9a4578-b9f0-4eba-a774-580a69276809" (UID: "2c9a4578-b9f0-4eba-a774-580a69276809"). InnerVolumeSpecName "kube-api-access-g5zhl". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:55:15 crc kubenswrapper[4861]: I1003 13:55:15.800710 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2c9a4578-b9f0-4eba-a774-580a69276809-config" (OuterVolumeSpecName: "config") pod "2c9a4578-b9f0-4eba-a774-580a69276809" (UID: "2c9a4578-b9f0-4eba-a774-580a69276809"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:55:15 crc kubenswrapper[4861]: I1003 13:55:15.812869 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2c9a4578-b9f0-4eba-a774-580a69276809-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "2c9a4578-b9f0-4eba-a774-580a69276809" (UID: "2c9a4578-b9f0-4eba-a774-580a69276809"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:55:15 crc kubenswrapper[4861]: I1003 13:55:15.817570 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2c9a4578-b9f0-4eba-a774-580a69276809-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "2c9a4578-b9f0-4eba-a774-580a69276809" (UID: "2c9a4578-b9f0-4eba-a774-580a69276809"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:55:15 crc kubenswrapper[4861]: I1003 13:55:15.829033 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2c9a4578-b9f0-4eba-a774-580a69276809-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "2c9a4578-b9f0-4eba-a774-580a69276809" (UID: "2c9a4578-b9f0-4eba-a774-580a69276809"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:55:15 crc kubenswrapper[4861]: I1003 13:55:15.849991 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2c9a4578-b9f0-4eba-a774-580a69276809-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "2c9a4578-b9f0-4eba-a774-580a69276809" (UID: "2c9a4578-b9f0-4eba-a774-580a69276809"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:55:15 crc kubenswrapper[4861]: I1003 13:55:15.856027 4861 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2c9a4578-b9f0-4eba-a774-580a69276809-config\") on node \"crc\" DevicePath \"\"" Oct 03 13:55:15 crc kubenswrapper[4861]: I1003 13:55:15.856057 4861 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2c9a4578-b9f0-4eba-a774-580a69276809-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 03 13:55:15 crc kubenswrapper[4861]: I1003 13:55:15.856072 4861 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2c9a4578-b9f0-4eba-a774-580a69276809-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 03 13:55:15 crc kubenswrapper[4861]: I1003 13:55:15.856086 4861 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2c9a4578-b9f0-4eba-a774-580a69276809-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 03 13:55:15 crc kubenswrapper[4861]: I1003 13:55:15.856100 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g5zhl\" (UniqueName: \"kubernetes.io/projected/2c9a4578-b9f0-4eba-a774-580a69276809-kube-api-access-g5zhl\") on node \"crc\" DevicePath \"\"" Oct 03 13:55:15 crc kubenswrapper[4861]: I1003 13:55:15.856111 4861 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2c9a4578-b9f0-4eba-a774-580a69276809-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 03 13:55:15 crc kubenswrapper[4861]: I1003 13:55:15.898191 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-79dc84bdb7-xjmx4"] Oct 03 13:55:15 crc kubenswrapper[4861]: W1003 13:55:15.904478 4861 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf6f37303_3f4a_44b7_aef2_ed92a6c277e2.slice/crio-71244cf693be0b9cd0cf4ba6f46255986f557ba2f15362506c30525c20252b85 WatchSource:0}: Error finding container 71244cf693be0b9cd0cf4ba6f46255986f557ba2f15362506c30525c20252b85: Status 404 returned error can't find the container with id 71244cf693be0b9cd0cf4ba6f46255986f557ba2f15362506c30525c20252b85 Oct 03 13:55:16 crc kubenswrapper[4861]: I1003 13:55:16.001057 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-59cf4bdb65-d4jfw"] Oct 03 13:55:16 crc kubenswrapper[4861]: I1003 13:55:16.013173 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-59cf4bdb65-d4jfw"] 
Oct 03 13:55:16 crc kubenswrapper[4861]: I1003 13:55:16.617277 4861 generic.go:334] "Generic (PLEG): container finished" podID="f6f37303-3f4a-44b7-aef2-ed92a6c277e2" containerID="860c18141c0d864353ce7f309d97717cba5d9a9974c361d3a6f2fdcf907992bb" exitCode=0 Oct 03 13:55:16 crc kubenswrapper[4861]: I1003 13:55:16.617382 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-79dc84bdb7-xjmx4" event={"ID":"f6f37303-3f4a-44b7-aef2-ed92a6c277e2","Type":"ContainerDied","Data":"860c18141c0d864353ce7f309d97717cba5d9a9974c361d3a6f2fdcf907992bb"} Oct 03 13:55:16 crc kubenswrapper[4861]: I1003 13:55:16.617710 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-79dc84bdb7-xjmx4" event={"ID":"f6f37303-3f4a-44b7-aef2-ed92a6c277e2","Type":"ContainerStarted","Data":"71244cf693be0b9cd0cf4ba6f46255986f557ba2f15362506c30525c20252b85"} Oct 03 13:55:16 crc kubenswrapper[4861]: I1003 13:55:16.695041 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2c9a4578-b9f0-4eba-a774-580a69276809" path="/var/lib/kubelet/pods/2c9a4578-b9f0-4eba-a774-580a69276809/volumes" Oct 03 13:55:17 crc kubenswrapper[4861]: I1003 13:55:17.058010 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-cbcx4"] Oct 03 13:55:17 crc kubenswrapper[4861]: E1003 13:55:17.058764 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c9a4578-b9f0-4eba-a774-580a69276809" containerName="dnsmasq-dns" Oct 03 13:55:17 crc kubenswrapper[4861]: I1003 13:55:17.058787 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c9a4578-b9f0-4eba-a774-580a69276809" containerName="dnsmasq-dns" Oct 03 13:55:17 crc kubenswrapper[4861]: E1003 13:55:17.058827 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c9a4578-b9f0-4eba-a774-580a69276809" containerName="init" Oct 03 13:55:17 crc kubenswrapper[4861]: I1003 13:55:17.058837 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c9a4578-b9f0-4eba-a774-580a69276809" containerName="init" Oct 03 13:55:17 crc kubenswrapper[4861]: I1003 13:55:17.059055 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="2c9a4578-b9f0-4eba-a774-580a69276809" containerName="dnsmasq-dns" Oct 03 13:55:17 crc kubenswrapper[4861]: I1003 13:55:17.060492 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-cbcx4" Oct 03 13:55:17 crc kubenswrapper[4861]: I1003 13:55:17.071998 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-cbcx4"] Oct 03 13:55:17 crc kubenswrapper[4861]: I1003 13:55:17.186385 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d9057935-6232-4066-9a4c-766c8a1e0e41-catalog-content\") pod \"community-operators-cbcx4\" (UID: \"d9057935-6232-4066-9a4c-766c8a1e0e41\") " pod="openshift-marketplace/community-operators-cbcx4" Oct 03 13:55:17 crc kubenswrapper[4861]: I1003 13:55:17.186539 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d9057935-6232-4066-9a4c-766c8a1e0e41-utilities\") pod \"community-operators-cbcx4\" (UID: \"d9057935-6232-4066-9a4c-766c8a1e0e41\") " pod="openshift-marketplace/community-operators-cbcx4" Oct 03 13:55:17 crc kubenswrapper[4861]: I1003 13:55:17.186856 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4mrd9\" (UniqueName: \"kubernetes.io/projected/d9057935-6232-4066-9a4c-766c8a1e0e41-kube-api-access-4mrd9\") pod \"community-operators-cbcx4\" (UID: \"d9057935-6232-4066-9a4c-766c8a1e0e41\") " pod="openshift-marketplace/community-operators-cbcx4" Oct 03 13:55:17 crc kubenswrapper[4861]: I1003 13:55:17.288796 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d9057935-6232-4066-9a4c-766c8a1e0e41-catalog-content\") pod \"community-operators-cbcx4\" (UID: \"d9057935-6232-4066-9a4c-766c8a1e0e41\") " pod="openshift-marketplace/community-operators-cbcx4" Oct 03 13:55:17 crc kubenswrapper[4861]: I1003 13:55:17.289179 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d9057935-6232-4066-9a4c-766c8a1e0e41-utilities\") pod \"community-operators-cbcx4\" (UID: \"d9057935-6232-4066-9a4c-766c8a1e0e41\") " pod="openshift-marketplace/community-operators-cbcx4" Oct 03 13:55:17 crc kubenswrapper[4861]: I1003 13:55:17.289353 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d9057935-6232-4066-9a4c-766c8a1e0e41-catalog-content\") pod \"community-operators-cbcx4\" (UID: \"d9057935-6232-4066-9a4c-766c8a1e0e41\") " pod="openshift-marketplace/community-operators-cbcx4" Oct 03 13:55:17 crc kubenswrapper[4861]: I1003 13:55:17.289963 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d9057935-6232-4066-9a4c-766c8a1e0e41-utilities\") pod \"community-operators-cbcx4\" (UID: \"d9057935-6232-4066-9a4c-766c8a1e0e41\") " pod="openshift-marketplace/community-operators-cbcx4" Oct 03 13:55:17 crc kubenswrapper[4861]: I1003 13:55:17.290205 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4mrd9\" (UniqueName: \"kubernetes.io/projected/d9057935-6232-4066-9a4c-766c8a1e0e41-kube-api-access-4mrd9\") pod \"community-operators-cbcx4\" (UID: \"d9057935-6232-4066-9a4c-766c8a1e0e41\") " pod="openshift-marketplace/community-operators-cbcx4" Oct 03 13:55:17 crc kubenswrapper[4861]: I1003 13:55:17.317273 4861 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-4mrd9\" (UniqueName: \"kubernetes.io/projected/d9057935-6232-4066-9a4c-766c8a1e0e41-kube-api-access-4mrd9\") pod \"community-operators-cbcx4\" (UID: \"d9057935-6232-4066-9a4c-766c8a1e0e41\") " pod="openshift-marketplace/community-operators-cbcx4" Oct 03 13:55:17 crc kubenswrapper[4861]: I1003 13:55:17.386360 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-cbcx4" Oct 03 13:55:17 crc kubenswrapper[4861]: I1003 13:55:17.636582 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-79dc84bdb7-xjmx4" event={"ID":"f6f37303-3f4a-44b7-aef2-ed92a6c277e2","Type":"ContainerStarted","Data":"1d8395f58e0479a5cfebe41f782590784d5b9c1b47652c90f877bdc504f909ff"} Oct 03 13:55:17 crc kubenswrapper[4861]: I1003 13:55:17.636731 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-79dc84bdb7-xjmx4" Oct 03 13:55:17 crc kubenswrapper[4861]: I1003 13:55:17.655647 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-79dc84bdb7-xjmx4" podStartSLOduration=2.655627954 podStartE2EDuration="2.655627954s" podCreationTimestamp="2025-10-03 13:55:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:55:17.654218796 +0000 UTC m=+1431.652203843" watchObservedRunningTime="2025-10-03 13:55:17.655627954 +0000 UTC m=+1431.653613001" Oct 03 13:55:17 crc kubenswrapper[4861]: W1003 13:55:17.838848 4861 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd9057935_6232_4066_9a4c_766c8a1e0e41.slice/crio-d0fff8515cc6424bbfee075074d87673a7cdcbe790698ff18459d1d12acae0c2 WatchSource:0}: Error finding container d0fff8515cc6424bbfee075074d87673a7cdcbe790698ff18459d1d12acae0c2: Status 404 returned error can't find the container with id d0fff8515cc6424bbfee075074d87673a7cdcbe790698ff18459d1d12acae0c2 Oct 03 13:55:17 crc kubenswrapper[4861]: I1003 13:55:17.841762 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-cbcx4"] Oct 03 13:55:18 crc kubenswrapper[4861]: I1003 13:55:18.646882 4861 generic.go:334] "Generic (PLEG): container finished" podID="d9057935-6232-4066-9a4c-766c8a1e0e41" containerID="ffd0f755d14e82ff4cfd758ec5569a12307f55998f7868791678226314becaa5" exitCode=0 Oct 03 13:55:18 crc kubenswrapper[4861]: I1003 13:55:18.647064 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cbcx4" event={"ID":"d9057935-6232-4066-9a4c-766c8a1e0e41","Type":"ContainerDied","Data":"ffd0f755d14e82ff4cfd758ec5569a12307f55998f7868791678226314becaa5"} Oct 03 13:55:18 crc kubenswrapper[4861]: I1003 13:55:18.647783 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cbcx4" event={"ID":"d9057935-6232-4066-9a4c-766c8a1e0e41","Type":"ContainerStarted","Data":"d0fff8515cc6424bbfee075074d87673a7cdcbe790698ff18459d1d12acae0c2"} Oct 03 13:55:19 crc kubenswrapper[4861]: I1003 13:55:19.678599 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cbcx4" event={"ID":"d9057935-6232-4066-9a4c-766c8a1e0e41","Type":"ContainerStarted","Data":"a595f1d226d620bbe4eaa18a758f2cf5e8afdfdbec71cb314a8de42cbf8d587f"} Oct 03 13:55:22 crc kubenswrapper[4861]: I1003 
13:55:22.708500 4861 generic.go:334] "Generic (PLEG): container finished" podID="d9057935-6232-4066-9a4c-766c8a1e0e41" containerID="a595f1d226d620bbe4eaa18a758f2cf5e8afdfdbec71cb314a8de42cbf8d587f" exitCode=0 Oct 03 13:55:22 crc kubenswrapper[4861]: I1003 13:55:22.708600 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cbcx4" event={"ID":"d9057935-6232-4066-9a4c-766c8a1e0e41","Type":"ContainerDied","Data":"a595f1d226d620bbe4eaa18a758f2cf5e8afdfdbec71cb314a8de42cbf8d587f"} Oct 03 13:55:24 crc kubenswrapper[4861]: I1003 13:55:24.733532 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cbcx4" event={"ID":"d9057935-6232-4066-9a4c-766c8a1e0e41","Type":"ContainerStarted","Data":"50cde12beb19eeb30a62c3b9f0fcb0e823e9de8e4113948f07d64f7cd24888f8"} Oct 03 13:55:24 crc kubenswrapper[4861]: I1003 13:55:24.758728 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-cbcx4" podStartSLOduration=2.8184167860000002 podStartE2EDuration="7.758709954s" podCreationTimestamp="2025-10-03 13:55:17 +0000 UTC" firstStartedPulling="2025-10-03 13:55:18.648653471 +0000 UTC m=+1432.646638518" lastFinishedPulling="2025-10-03 13:55:23.588946639 +0000 UTC m=+1437.586931686" observedRunningTime="2025-10-03 13:55:24.75667871 +0000 UTC m=+1438.754663747" watchObservedRunningTime="2025-10-03 13:55:24.758709954 +0000 UTC m=+1438.756695001" Oct 03 13:55:25 crc kubenswrapper[4861]: I1003 13:55:25.420984 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-79dc84bdb7-xjmx4" Oct 03 13:55:25 crc kubenswrapper[4861]: I1003 13:55:25.524612 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-67b789f86c-hc5qn"] Oct 03 13:55:25 crc kubenswrapper[4861]: I1003 13:55:25.524842 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-67b789f86c-hc5qn" podUID="7caf2b14-ce0f-4266-a844-6c507e9802ab" containerName="dnsmasq-dns" containerID="cri-o://177183e143e25882621c655b779e010012aa165663d6ca812d44afb21249e3b8" gracePeriod=10 Oct 03 13:55:27 crc kubenswrapper[4861]: I1003 13:55:27.386498 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-cbcx4" Oct 03 13:55:27 crc kubenswrapper[4861]: I1003 13:55:27.387381 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-cbcx4" Oct 03 13:55:27 crc kubenswrapper[4861]: I1003 13:55:27.444073 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-cbcx4" Oct 03 13:55:28 crc kubenswrapper[4861]: I1003 13:55:28.712756 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-67b789f86c-hc5qn" Oct 03 13:55:28 crc kubenswrapper[4861]: I1003 13:55:28.776614 4861 generic.go:334] "Generic (PLEG): container finished" podID="7caf2b14-ce0f-4266-a844-6c507e9802ab" containerID="177183e143e25882621c655b779e010012aa165663d6ca812d44afb21249e3b8" exitCode=0 Oct 03 13:55:28 crc kubenswrapper[4861]: I1003 13:55:28.776660 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67b789f86c-hc5qn" event={"ID":"7caf2b14-ce0f-4266-a844-6c507e9802ab","Type":"ContainerDied","Data":"177183e143e25882621c655b779e010012aa165663d6ca812d44afb21249e3b8"} Oct 03 13:55:28 crc kubenswrapper[4861]: I1003 13:55:28.776706 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67b789f86c-hc5qn" event={"ID":"7caf2b14-ce0f-4266-a844-6c507e9802ab","Type":"ContainerDied","Data":"7cbb0bbb8835659a47f4a4800fe26325332d66f8eb6b5f29049d3447cb17fc2b"} Oct 03 13:55:28 crc kubenswrapper[4861]: I1003 13:55:28.776725 4861 scope.go:117] "RemoveContainer" containerID="177183e143e25882621c655b779e010012aa165663d6ca812d44afb21249e3b8" Oct 03 13:55:28 crc kubenswrapper[4861]: I1003 13:55:28.776667 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-67b789f86c-hc5qn" Oct 03 13:55:28 crc kubenswrapper[4861]: I1003 13:55:28.795927 4861 scope.go:117] "RemoveContainer" containerID="aa927244878d3e3dd97cc14b01e6b2255fdcd9fbd7cd4d6dc9b46b8576ce4703" Oct 03 13:55:28 crc kubenswrapper[4861]: I1003 13:55:28.813679 4861 scope.go:117] "RemoveContainer" containerID="177183e143e25882621c655b779e010012aa165663d6ca812d44afb21249e3b8" Oct 03 13:55:28 crc kubenswrapper[4861]: E1003 13:55:28.814132 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"177183e143e25882621c655b779e010012aa165663d6ca812d44afb21249e3b8\": container with ID starting with 177183e143e25882621c655b779e010012aa165663d6ca812d44afb21249e3b8 not found: ID does not exist" containerID="177183e143e25882621c655b779e010012aa165663d6ca812d44afb21249e3b8" Oct 03 13:55:28 crc kubenswrapper[4861]: I1003 13:55:28.814168 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"177183e143e25882621c655b779e010012aa165663d6ca812d44afb21249e3b8"} err="failed to get container status \"177183e143e25882621c655b779e010012aa165663d6ca812d44afb21249e3b8\": rpc error: code = NotFound desc = could not find container \"177183e143e25882621c655b779e010012aa165663d6ca812d44afb21249e3b8\": container with ID starting with 177183e143e25882621c655b779e010012aa165663d6ca812d44afb21249e3b8 not found: ID does not exist" Oct 03 13:55:28 crc kubenswrapper[4861]: I1003 13:55:28.814190 4861 scope.go:117] "RemoveContainer" containerID="aa927244878d3e3dd97cc14b01e6b2255fdcd9fbd7cd4d6dc9b46b8576ce4703" Oct 03 13:55:28 crc kubenswrapper[4861]: E1003 13:55:28.814442 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aa927244878d3e3dd97cc14b01e6b2255fdcd9fbd7cd4d6dc9b46b8576ce4703\": container with ID starting with aa927244878d3e3dd97cc14b01e6b2255fdcd9fbd7cd4d6dc9b46b8576ce4703 not found: ID does not exist" containerID="aa927244878d3e3dd97cc14b01e6b2255fdcd9fbd7cd4d6dc9b46b8576ce4703" Oct 03 13:55:28 crc kubenswrapper[4861]: I1003 13:55:28.814470 4861 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"aa927244878d3e3dd97cc14b01e6b2255fdcd9fbd7cd4d6dc9b46b8576ce4703"} err="failed to get container status \"aa927244878d3e3dd97cc14b01e6b2255fdcd9fbd7cd4d6dc9b46b8576ce4703\": rpc error: code = NotFound desc = could not find container \"aa927244878d3e3dd97cc14b01e6b2255fdcd9fbd7cd4d6dc9b46b8576ce4703\": container with ID starting with aa927244878d3e3dd97cc14b01e6b2255fdcd9fbd7cd4d6dc9b46b8576ce4703 not found: ID does not exist" Oct 03 13:55:28 crc kubenswrapper[4861]: I1003 13:55:28.833282 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wcnt9\" (UniqueName: \"kubernetes.io/projected/7caf2b14-ce0f-4266-a844-6c507e9802ab-kube-api-access-wcnt9\") pod \"7caf2b14-ce0f-4266-a844-6c507e9802ab\" (UID: \"7caf2b14-ce0f-4266-a844-6c507e9802ab\") " Oct 03 13:55:28 crc kubenswrapper[4861]: I1003 13:55:28.833364 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7caf2b14-ce0f-4266-a844-6c507e9802ab-ovsdbserver-sb\") pod \"7caf2b14-ce0f-4266-a844-6c507e9802ab\" (UID: \"7caf2b14-ce0f-4266-a844-6c507e9802ab\") " Oct 03 13:55:28 crc kubenswrapper[4861]: I1003 13:55:28.833410 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/7caf2b14-ce0f-4266-a844-6c507e9802ab-openstack-edpm-ipam\") pod \"7caf2b14-ce0f-4266-a844-6c507e9802ab\" (UID: \"7caf2b14-ce0f-4266-a844-6c507e9802ab\") " Oct 03 13:55:28 crc kubenswrapper[4861]: I1003 13:55:28.833426 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7caf2b14-ce0f-4266-a844-6c507e9802ab-config\") pod \"7caf2b14-ce0f-4266-a844-6c507e9802ab\" (UID: \"7caf2b14-ce0f-4266-a844-6c507e9802ab\") " Oct 03 13:55:28 crc kubenswrapper[4861]: I1003 13:55:28.833479 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7caf2b14-ce0f-4266-a844-6c507e9802ab-dns-swift-storage-0\") pod \"7caf2b14-ce0f-4266-a844-6c507e9802ab\" (UID: \"7caf2b14-ce0f-4266-a844-6c507e9802ab\") " Oct 03 13:55:28 crc kubenswrapper[4861]: I1003 13:55:28.833496 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7caf2b14-ce0f-4266-a844-6c507e9802ab-ovsdbserver-nb\") pod \"7caf2b14-ce0f-4266-a844-6c507e9802ab\" (UID: \"7caf2b14-ce0f-4266-a844-6c507e9802ab\") " Oct 03 13:55:28 crc kubenswrapper[4861]: I1003 13:55:28.833531 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7caf2b14-ce0f-4266-a844-6c507e9802ab-dns-svc\") pod \"7caf2b14-ce0f-4266-a844-6c507e9802ab\" (UID: \"7caf2b14-ce0f-4266-a844-6c507e9802ab\") " Oct 03 13:55:28 crc kubenswrapper[4861]: I1003 13:55:28.839506 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7caf2b14-ce0f-4266-a844-6c507e9802ab-kube-api-access-wcnt9" (OuterVolumeSpecName: "kube-api-access-wcnt9") pod "7caf2b14-ce0f-4266-a844-6c507e9802ab" (UID: "7caf2b14-ce0f-4266-a844-6c507e9802ab"). InnerVolumeSpecName "kube-api-access-wcnt9". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:55:28 crc kubenswrapper[4861]: I1003 13:55:28.883203 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7caf2b14-ce0f-4266-a844-6c507e9802ab-config" (OuterVolumeSpecName: "config") pod "7caf2b14-ce0f-4266-a844-6c507e9802ab" (UID: "7caf2b14-ce0f-4266-a844-6c507e9802ab"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:55:28 crc kubenswrapper[4861]: I1003 13:55:28.889572 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7caf2b14-ce0f-4266-a844-6c507e9802ab-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "7caf2b14-ce0f-4266-a844-6c507e9802ab" (UID: "7caf2b14-ce0f-4266-a844-6c507e9802ab"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:55:28 crc kubenswrapper[4861]: I1003 13:55:28.889790 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7caf2b14-ce0f-4266-a844-6c507e9802ab-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "7caf2b14-ce0f-4266-a844-6c507e9802ab" (UID: "7caf2b14-ce0f-4266-a844-6c507e9802ab"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:55:28 crc kubenswrapper[4861]: I1003 13:55:28.893843 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7caf2b14-ce0f-4266-a844-6c507e9802ab-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "7caf2b14-ce0f-4266-a844-6c507e9802ab" (UID: "7caf2b14-ce0f-4266-a844-6c507e9802ab"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:55:28 crc kubenswrapper[4861]: I1003 13:55:28.902493 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7caf2b14-ce0f-4266-a844-6c507e9802ab-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "7caf2b14-ce0f-4266-a844-6c507e9802ab" (UID: "7caf2b14-ce0f-4266-a844-6c507e9802ab"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:55:28 crc kubenswrapper[4861]: I1003 13:55:28.904893 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7caf2b14-ce0f-4266-a844-6c507e9802ab-openstack-edpm-ipam" (OuterVolumeSpecName: "openstack-edpm-ipam") pod "7caf2b14-ce0f-4266-a844-6c507e9802ab" (UID: "7caf2b14-ce0f-4266-a844-6c507e9802ab"). InnerVolumeSpecName "openstack-edpm-ipam". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:55:28 crc kubenswrapper[4861]: I1003 13:55:28.936142 4861 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/7caf2b14-ce0f-4266-a844-6c507e9802ab-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Oct 03 13:55:28 crc kubenswrapper[4861]: I1003 13:55:28.936179 4861 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7caf2b14-ce0f-4266-a844-6c507e9802ab-config\") on node \"crc\" DevicePath \"\"" Oct 03 13:55:28 crc kubenswrapper[4861]: I1003 13:55:28.936192 4861 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7caf2b14-ce0f-4266-a844-6c507e9802ab-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 03 13:55:28 crc kubenswrapper[4861]: I1003 13:55:28.936205 4861 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7caf2b14-ce0f-4266-a844-6c507e9802ab-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 03 13:55:28 crc kubenswrapper[4861]: I1003 13:55:28.936217 4861 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7caf2b14-ce0f-4266-a844-6c507e9802ab-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 03 13:55:28 crc kubenswrapper[4861]: I1003 13:55:28.936310 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wcnt9\" (UniqueName: \"kubernetes.io/projected/7caf2b14-ce0f-4266-a844-6c507e9802ab-kube-api-access-wcnt9\") on node \"crc\" DevicePath \"\"" Oct 03 13:55:28 crc kubenswrapper[4861]: I1003 13:55:28.936326 4861 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7caf2b14-ce0f-4266-a844-6c507e9802ab-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 03 13:55:29 crc kubenswrapper[4861]: I1003 13:55:29.107479 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-67b789f86c-hc5qn"] Oct 03 13:55:29 crc kubenswrapper[4861]: I1003 13:55:29.115479 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-67b789f86c-hc5qn"] Oct 03 13:55:30 crc kubenswrapper[4861]: I1003 13:55:30.690549 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7caf2b14-ce0f-4266-a844-6c507e9802ab" path="/var/lib/kubelet/pods/7caf2b14-ce0f-4266-a844-6c507e9802ab/volumes" Oct 03 13:55:35 crc kubenswrapper[4861]: I1003 13:55:35.847900 4861 generic.go:334] "Generic (PLEG): container finished" podID="0494d758-5f63-45c6-930c-f34b43484fd9" containerID="6f82f7a9b3c6bf7b7cce645f8d3a5cf7dc56560ae46650ef9b2f6497e72321a1" exitCode=0 Oct 03 13:55:35 crc kubenswrapper[4861]: I1003 13:55:35.848069 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"0494d758-5f63-45c6-930c-f34b43484fd9","Type":"ContainerDied","Data":"6f82f7a9b3c6bf7b7cce645f8d3a5cf7dc56560ae46650ef9b2f6497e72321a1"} Oct 03 13:55:36 crc kubenswrapper[4861]: I1003 13:55:36.859702 4861 generic.go:334] "Generic (PLEG): container finished" podID="b222a9ce-46d6-4caf-b76d-f6b773276cb1" containerID="7301a3c29bed9ed403397d330ac8898babfd3f26d4e4d6691cd083115783bfea" exitCode=0 Oct 03 13:55:36 crc kubenswrapper[4861]: I1003 13:55:36.859792 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" 
event={"ID":"b222a9ce-46d6-4caf-b76d-f6b773276cb1","Type":"ContainerDied","Data":"7301a3c29bed9ed403397d330ac8898babfd3f26d4e4d6691cd083115783bfea"} Oct 03 13:55:37 crc kubenswrapper[4861]: I1003 13:55:37.466500 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-cbcx4" Oct 03 13:55:37 crc kubenswrapper[4861]: I1003 13:55:37.528216 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-cbcx4"] Oct 03 13:55:37 crc kubenswrapper[4861]: I1003 13:55:37.868784 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"0494d758-5f63-45c6-930c-f34b43484fd9","Type":"ContainerStarted","Data":"42b02a6a07b6beb0750d121c75a6b5db214d6345ab51e114178a9ab9f9268dbd"} Oct 03 13:55:37 crc kubenswrapper[4861]: I1003 13:55:37.868970 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-cbcx4" podUID="d9057935-6232-4066-9a4c-766c8a1e0e41" containerName="registry-server" containerID="cri-o://50cde12beb19eeb30a62c3b9f0fcb0e823e9de8e4113948f07d64f7cd24888f8" gracePeriod=2 Oct 03 13:55:38 crc kubenswrapper[4861]: I1003 13:55:38.881956 4861 generic.go:334] "Generic (PLEG): container finished" podID="d9057935-6232-4066-9a4c-766c8a1e0e41" containerID="50cde12beb19eeb30a62c3b9f0fcb0e823e9de8e4113948f07d64f7cd24888f8" exitCode=0 Oct 03 13:55:38 crc kubenswrapper[4861]: I1003 13:55:38.882043 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cbcx4" event={"ID":"d9057935-6232-4066-9a4c-766c8a1e0e41","Type":"ContainerDied","Data":"50cde12beb19eeb30a62c3b9f0fcb0e823e9de8e4113948f07d64f7cd24888f8"} Oct 03 13:55:38 crc kubenswrapper[4861]: I1003 13:55:38.884782 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"b222a9ce-46d6-4caf-b76d-f6b773276cb1","Type":"ContainerStarted","Data":"7d98d19a7ba652fda2c2f14527ee47e0fafd329b5be5d10d2654a92c86294e81"} Oct 03 13:55:38 crc kubenswrapper[4861]: I1003 13:55:38.885047 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:55:38 crc kubenswrapper[4861]: I1003 13:55:38.910752 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=37.910702722 podStartE2EDuration="37.910702722s" podCreationTimestamp="2025-10-03 13:55:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:55:38.90383704 +0000 UTC m=+1452.901822097" watchObservedRunningTime="2025-10-03 13:55:38.910702722 +0000 UTC m=+1452.908687769" Oct 03 13:55:39 crc kubenswrapper[4861]: I1003 13:55:39.400485 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-cbcx4" Oct 03 13:55:39 crc kubenswrapper[4861]: I1003 13:55:39.419575 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=38.419556388 podStartE2EDuration="38.419556388s" podCreationTimestamp="2025-10-03 13:55:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:55:38.939717591 +0000 UTC m=+1452.937702648" watchObservedRunningTime="2025-10-03 13:55:39.419556388 +0000 UTC m=+1453.417541435" Oct 03 13:55:39 crc kubenswrapper[4861]: I1003 13:55:39.537776 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d9057935-6232-4066-9a4c-766c8a1e0e41-utilities\") pod \"d9057935-6232-4066-9a4c-766c8a1e0e41\" (UID: \"d9057935-6232-4066-9a4c-766c8a1e0e41\") " Oct 03 13:55:39 crc kubenswrapper[4861]: I1003 13:55:39.537933 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d9057935-6232-4066-9a4c-766c8a1e0e41-catalog-content\") pod \"d9057935-6232-4066-9a4c-766c8a1e0e41\" (UID: \"d9057935-6232-4066-9a4c-766c8a1e0e41\") " Oct 03 13:55:39 crc kubenswrapper[4861]: I1003 13:55:39.538025 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4mrd9\" (UniqueName: \"kubernetes.io/projected/d9057935-6232-4066-9a4c-766c8a1e0e41-kube-api-access-4mrd9\") pod \"d9057935-6232-4066-9a4c-766c8a1e0e41\" (UID: \"d9057935-6232-4066-9a4c-766c8a1e0e41\") " Oct 03 13:55:39 crc kubenswrapper[4861]: I1003 13:55:39.538901 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d9057935-6232-4066-9a4c-766c8a1e0e41-utilities" (OuterVolumeSpecName: "utilities") pod "d9057935-6232-4066-9a4c-766c8a1e0e41" (UID: "d9057935-6232-4066-9a4c-766c8a1e0e41"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:55:39 crc kubenswrapper[4861]: I1003 13:55:39.557115 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d9057935-6232-4066-9a4c-766c8a1e0e41-kube-api-access-4mrd9" (OuterVolumeSpecName: "kube-api-access-4mrd9") pod "d9057935-6232-4066-9a4c-766c8a1e0e41" (UID: "d9057935-6232-4066-9a4c-766c8a1e0e41"). InnerVolumeSpecName "kube-api-access-4mrd9". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:55:39 crc kubenswrapper[4861]: I1003 13:55:39.609002 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d9057935-6232-4066-9a4c-766c8a1e0e41-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d9057935-6232-4066-9a4c-766c8a1e0e41" (UID: "d9057935-6232-4066-9a4c-766c8a1e0e41"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:55:39 crc kubenswrapper[4861]: I1003 13:55:39.639747 4861 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d9057935-6232-4066-9a4c-766c8a1e0e41-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 13:55:39 crc kubenswrapper[4861]: I1003 13:55:39.639988 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4mrd9\" (UniqueName: \"kubernetes.io/projected/d9057935-6232-4066-9a4c-766c8a1e0e41-kube-api-access-4mrd9\") on node \"crc\" DevicePath \"\"" Oct 03 13:55:39 crc kubenswrapper[4861]: I1003 13:55:39.640046 4861 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d9057935-6232-4066-9a4c-766c8a1e0e41-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 13:55:39 crc kubenswrapper[4861]: I1003 13:55:39.895469 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cbcx4" event={"ID":"d9057935-6232-4066-9a4c-766c8a1e0e41","Type":"ContainerDied","Data":"d0fff8515cc6424bbfee075074d87673a7cdcbe790698ff18459d1d12acae0c2"} Oct 03 13:55:39 crc kubenswrapper[4861]: I1003 13:55:39.895830 4861 scope.go:117] "RemoveContainer" containerID="50cde12beb19eeb30a62c3b9f0fcb0e823e9de8e4113948f07d64f7cd24888f8" Oct 03 13:55:39 crc kubenswrapper[4861]: I1003 13:55:39.896038 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-cbcx4" Oct 03 13:55:39 crc kubenswrapper[4861]: I1003 13:55:39.917571 4861 scope.go:117] "RemoveContainer" containerID="a595f1d226d620bbe4eaa18a758f2cf5e8afdfdbec71cb314a8de42cbf8d587f" Oct 03 13:55:39 crc kubenswrapper[4861]: I1003 13:55:39.949426 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-cbcx4"] Oct 03 13:55:39 crc kubenswrapper[4861]: I1003 13:55:39.952934 4861 scope.go:117] "RemoveContainer" containerID="ffd0f755d14e82ff4cfd758ec5569a12307f55998f7868791678226314becaa5" Oct 03 13:55:39 crc kubenswrapper[4861]: I1003 13:55:39.962293 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-cbcx4"] Oct 03 13:55:40 crc kubenswrapper[4861]: I1003 13:55:40.692961 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d9057935-6232-4066-9a4c-766c8a1e0e41" path="/var/lib/kubelet/pods/d9057935-6232-4066-9a4c-766c8a1e0e41/volumes" Oct 03 13:55:41 crc kubenswrapper[4861]: I1003 13:55:41.721691 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Oct 03 13:55:48 crc kubenswrapper[4861]: I1003 13:55:48.908754 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jjs67"] Oct 03 13:55:48 crc kubenswrapper[4861]: E1003 13:55:48.909606 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d9057935-6232-4066-9a4c-766c8a1e0e41" containerName="registry-server" Oct 03 13:55:48 crc kubenswrapper[4861]: I1003 13:55:48.909619 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="d9057935-6232-4066-9a4c-766c8a1e0e41" containerName="registry-server" Oct 03 13:55:48 crc kubenswrapper[4861]: E1003 13:55:48.909643 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d9057935-6232-4066-9a4c-766c8a1e0e41" containerName="extract-utilities" Oct 03 13:55:48 crc kubenswrapper[4861]: I1003 13:55:48.909649 4861 
state_mem.go:107] "Deleted CPUSet assignment" podUID="d9057935-6232-4066-9a4c-766c8a1e0e41" containerName="extract-utilities" Oct 03 13:55:48 crc kubenswrapper[4861]: E1003 13:55:48.909663 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d9057935-6232-4066-9a4c-766c8a1e0e41" containerName="extract-content" Oct 03 13:55:48 crc kubenswrapper[4861]: I1003 13:55:48.909676 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="d9057935-6232-4066-9a4c-766c8a1e0e41" containerName="extract-content" Oct 03 13:55:48 crc kubenswrapper[4861]: E1003 13:55:48.909688 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7caf2b14-ce0f-4266-a844-6c507e9802ab" containerName="init" Oct 03 13:55:48 crc kubenswrapper[4861]: I1003 13:55:48.909696 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="7caf2b14-ce0f-4266-a844-6c507e9802ab" containerName="init" Oct 03 13:55:48 crc kubenswrapper[4861]: E1003 13:55:48.909709 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7caf2b14-ce0f-4266-a844-6c507e9802ab" containerName="dnsmasq-dns" Oct 03 13:55:48 crc kubenswrapper[4861]: I1003 13:55:48.909716 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="7caf2b14-ce0f-4266-a844-6c507e9802ab" containerName="dnsmasq-dns" Oct 03 13:55:48 crc kubenswrapper[4861]: I1003 13:55:48.909928 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="7caf2b14-ce0f-4266-a844-6c507e9802ab" containerName="dnsmasq-dns" Oct 03 13:55:48 crc kubenswrapper[4861]: I1003 13:55:48.909939 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="d9057935-6232-4066-9a4c-766c8a1e0e41" containerName="registry-server" Oct 03 13:55:48 crc kubenswrapper[4861]: I1003 13:55:48.910675 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jjs67" Oct 03 13:55:48 crc kubenswrapper[4861]: I1003 13:55:48.913496 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 03 13:55:48 crc kubenswrapper[4861]: I1003 13:55:48.914043 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 03 13:55:48 crc kubenswrapper[4861]: I1003 13:55:48.914381 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 03 13:55:48 crc kubenswrapper[4861]: I1003 13:55:48.914738 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-bd7xg" Oct 03 13:55:48 crc kubenswrapper[4861]: I1003 13:55:48.934206 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jjs67"] Oct 03 13:55:49 crc kubenswrapper[4861]: I1003 13:55:49.035798 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bxzf5\" (UniqueName: \"kubernetes.io/projected/bb1b5aa4-44c6-475c-8995-ac100260ce29-kube-api-access-bxzf5\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-jjs67\" (UID: \"bb1b5aa4-44c6-475c-8995-ac100260ce29\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jjs67" Oct 03 13:55:49 crc kubenswrapper[4861]: I1003 13:55:49.035934 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/bb1b5aa4-44c6-475c-8995-ac100260ce29-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-jjs67\" (UID: \"bb1b5aa4-44c6-475c-8995-ac100260ce29\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jjs67" Oct 03 13:55:49 crc kubenswrapper[4861]: I1003 13:55:49.037379 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bb1b5aa4-44c6-475c-8995-ac100260ce29-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-jjs67\" (UID: \"bb1b5aa4-44c6-475c-8995-ac100260ce29\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jjs67" Oct 03 13:55:49 crc kubenswrapper[4861]: I1003 13:55:49.037525 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/bb1b5aa4-44c6-475c-8995-ac100260ce29-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-jjs67\" (UID: \"bb1b5aa4-44c6-475c-8995-ac100260ce29\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jjs67" Oct 03 13:55:49 crc kubenswrapper[4861]: I1003 13:55:49.138955 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/bb1b5aa4-44c6-475c-8995-ac100260ce29-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-jjs67\" (UID: \"bb1b5aa4-44c6-475c-8995-ac100260ce29\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jjs67" Oct 03 13:55:49 crc kubenswrapper[4861]: I1003 13:55:49.139375 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bb1b5aa4-44c6-475c-8995-ac100260ce29-repo-setup-combined-ca-bundle\") pod 
\"repo-setup-edpm-deployment-openstack-edpm-ipam-jjs67\" (UID: \"bb1b5aa4-44c6-475c-8995-ac100260ce29\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jjs67" Oct 03 13:55:49 crc kubenswrapper[4861]: I1003 13:55:49.139422 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/bb1b5aa4-44c6-475c-8995-ac100260ce29-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-jjs67\" (UID: \"bb1b5aa4-44c6-475c-8995-ac100260ce29\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jjs67" Oct 03 13:55:49 crc kubenswrapper[4861]: I1003 13:55:49.139474 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bxzf5\" (UniqueName: \"kubernetes.io/projected/bb1b5aa4-44c6-475c-8995-ac100260ce29-kube-api-access-bxzf5\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-jjs67\" (UID: \"bb1b5aa4-44c6-475c-8995-ac100260ce29\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jjs67" Oct 03 13:55:49 crc kubenswrapper[4861]: I1003 13:55:49.145471 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/bb1b5aa4-44c6-475c-8995-ac100260ce29-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-jjs67\" (UID: \"bb1b5aa4-44c6-475c-8995-ac100260ce29\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jjs67" Oct 03 13:55:49 crc kubenswrapper[4861]: I1003 13:55:49.145767 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bb1b5aa4-44c6-475c-8995-ac100260ce29-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-jjs67\" (UID: \"bb1b5aa4-44c6-475c-8995-ac100260ce29\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jjs67" Oct 03 13:55:49 crc kubenswrapper[4861]: I1003 13:55:49.146005 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/bb1b5aa4-44c6-475c-8995-ac100260ce29-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-jjs67\" (UID: \"bb1b5aa4-44c6-475c-8995-ac100260ce29\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jjs67" Oct 03 13:55:49 crc kubenswrapper[4861]: I1003 13:55:49.165842 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bxzf5\" (UniqueName: \"kubernetes.io/projected/bb1b5aa4-44c6-475c-8995-ac100260ce29-kube-api-access-bxzf5\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-jjs67\" (UID: \"bb1b5aa4-44c6-475c-8995-ac100260ce29\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jjs67" Oct 03 13:55:49 crc kubenswrapper[4861]: I1003 13:55:49.234347 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jjs67" Oct 03 13:55:50 crc kubenswrapper[4861]: I1003 13:55:50.450201 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jjs67"] Oct 03 13:55:51 crc kubenswrapper[4861]: I1003 13:55:51.001567 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jjs67" event={"ID":"bb1b5aa4-44c6-475c-8995-ac100260ce29","Type":"ContainerStarted","Data":"afadeed08e6ed309e9ec6e7ddbc419b97564023cf5e3609bc7cabb0a02748096"} Oct 03 13:55:51 crc kubenswrapper[4861]: I1003 13:55:51.726483 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Oct 03 13:55:51 crc kubenswrapper[4861]: I1003 13:55:51.855552 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:55:55 crc kubenswrapper[4861]: I1003 13:55:55.926925 4861 scope.go:117] "RemoveContainer" containerID="60250a7670cba43875a0bb507a6bf0501a8f81070df90b0d0da123ba0742bc8f" Oct 03 13:56:00 crc kubenswrapper[4861]: I1003 13:56:00.144917 4861 patch_prober.go:28] interesting pod/machine-config-daemon-t9slw container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 13:56:00 crc kubenswrapper[4861]: I1003 13:56:00.145460 4861 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 13:56:04 crc kubenswrapper[4861]: I1003 13:56:04.149507 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jjs67" event={"ID":"bb1b5aa4-44c6-475c-8995-ac100260ce29","Type":"ContainerStarted","Data":"65608e0fd1727d96cda7e9015644cc260928045bcfa639b2e169208695ff158e"} Oct 03 13:56:07 crc kubenswrapper[4861]: I1003 13:56:07.104907 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jjs67" podStartSLOduration=6.118097002 podStartE2EDuration="19.104882985s" podCreationTimestamp="2025-10-03 13:55:48 +0000 UTC" firstStartedPulling="2025-10-03 13:55:50.461983522 +0000 UTC m=+1464.459968569" lastFinishedPulling="2025-10-03 13:56:03.448769505 +0000 UTC m=+1477.446754552" observedRunningTime="2025-10-03 13:56:04.17132957 +0000 UTC m=+1478.169314627" watchObservedRunningTime="2025-10-03 13:56:07.104882985 +0000 UTC m=+1481.102868032" Oct 03 13:56:07 crc kubenswrapper[4861]: I1003 13:56:07.113690 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-59c5l"] Oct 03 13:56:07 crc kubenswrapper[4861]: I1003 13:56:07.115963 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-59c5l" Oct 03 13:56:07 crc kubenswrapper[4861]: I1003 13:56:07.141943 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-59c5l"] Oct 03 13:56:07 crc kubenswrapper[4861]: I1003 13:56:07.203197 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e5f51cc9-0787-406a-9488-30de104d9043-catalog-content\") pod \"certified-operators-59c5l\" (UID: \"e5f51cc9-0787-406a-9488-30de104d9043\") " pod="openshift-marketplace/certified-operators-59c5l" Oct 03 13:56:07 crc kubenswrapper[4861]: I1003 13:56:07.203269 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e5f51cc9-0787-406a-9488-30de104d9043-utilities\") pod \"certified-operators-59c5l\" (UID: \"e5f51cc9-0787-406a-9488-30de104d9043\") " pod="openshift-marketplace/certified-operators-59c5l" Oct 03 13:56:07 crc kubenswrapper[4861]: I1003 13:56:07.203462 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rpqk2\" (UniqueName: \"kubernetes.io/projected/e5f51cc9-0787-406a-9488-30de104d9043-kube-api-access-rpqk2\") pod \"certified-operators-59c5l\" (UID: \"e5f51cc9-0787-406a-9488-30de104d9043\") " pod="openshift-marketplace/certified-operators-59c5l" Oct 03 13:56:07 crc kubenswrapper[4861]: I1003 13:56:07.305346 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rpqk2\" (UniqueName: \"kubernetes.io/projected/e5f51cc9-0787-406a-9488-30de104d9043-kube-api-access-rpqk2\") pod \"certified-operators-59c5l\" (UID: \"e5f51cc9-0787-406a-9488-30de104d9043\") " pod="openshift-marketplace/certified-operators-59c5l" Oct 03 13:56:07 crc kubenswrapper[4861]: I1003 13:56:07.305509 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e5f51cc9-0787-406a-9488-30de104d9043-catalog-content\") pod \"certified-operators-59c5l\" (UID: \"e5f51cc9-0787-406a-9488-30de104d9043\") " pod="openshift-marketplace/certified-operators-59c5l" Oct 03 13:56:07 crc kubenswrapper[4861]: I1003 13:56:07.305527 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e5f51cc9-0787-406a-9488-30de104d9043-utilities\") pod \"certified-operators-59c5l\" (UID: \"e5f51cc9-0787-406a-9488-30de104d9043\") " pod="openshift-marketplace/certified-operators-59c5l" Oct 03 13:56:07 crc kubenswrapper[4861]: I1003 13:56:07.305939 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e5f51cc9-0787-406a-9488-30de104d9043-catalog-content\") pod \"certified-operators-59c5l\" (UID: \"e5f51cc9-0787-406a-9488-30de104d9043\") " pod="openshift-marketplace/certified-operators-59c5l" Oct 03 13:56:07 crc kubenswrapper[4861]: I1003 13:56:07.306005 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e5f51cc9-0787-406a-9488-30de104d9043-utilities\") pod \"certified-operators-59c5l\" (UID: \"e5f51cc9-0787-406a-9488-30de104d9043\") " pod="openshift-marketplace/certified-operators-59c5l" Oct 03 13:56:07 crc kubenswrapper[4861]: I1003 13:56:07.324804 4861 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-rpqk2\" (UniqueName: \"kubernetes.io/projected/e5f51cc9-0787-406a-9488-30de104d9043-kube-api-access-rpqk2\") pod \"certified-operators-59c5l\" (UID: \"e5f51cc9-0787-406a-9488-30de104d9043\") " pod="openshift-marketplace/certified-operators-59c5l" Oct 03 13:56:07 crc kubenswrapper[4861]: I1003 13:56:07.439573 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-59c5l" Oct 03 13:56:07 crc kubenswrapper[4861]: I1003 13:56:07.909902 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-59c5l"] Oct 03 13:56:08 crc kubenswrapper[4861]: I1003 13:56:08.192462 4861 generic.go:334] "Generic (PLEG): container finished" podID="e5f51cc9-0787-406a-9488-30de104d9043" containerID="c7fc00420bbae860764b323536b50bfa4646e6aedf95a89a85b391bdc63a4ec5" exitCode=0 Oct 03 13:56:08 crc kubenswrapper[4861]: I1003 13:56:08.192533 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-59c5l" event={"ID":"e5f51cc9-0787-406a-9488-30de104d9043","Type":"ContainerDied","Data":"c7fc00420bbae860764b323536b50bfa4646e6aedf95a89a85b391bdc63a4ec5"} Oct 03 13:56:08 crc kubenswrapper[4861]: I1003 13:56:08.192833 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-59c5l" event={"ID":"e5f51cc9-0787-406a-9488-30de104d9043","Type":"ContainerStarted","Data":"7024f915799e629a349dc2ba45acf20ca29dd514c7613d87be35d737793e7c76"} Oct 03 13:56:09 crc kubenswrapper[4861]: I1003 13:56:09.222123 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-59c5l" event={"ID":"e5f51cc9-0787-406a-9488-30de104d9043","Type":"ContainerStarted","Data":"95c367fdf3ae845cef2e8a6513d80d93563ab8024ec7937ca3119cdcf0852f4c"} Oct 03 13:56:11 crc kubenswrapper[4861]: I1003 13:56:11.240905 4861 generic.go:334] "Generic (PLEG): container finished" podID="e5f51cc9-0787-406a-9488-30de104d9043" containerID="95c367fdf3ae845cef2e8a6513d80d93563ab8024ec7937ca3119cdcf0852f4c" exitCode=0 Oct 03 13:56:11 crc kubenswrapper[4861]: I1003 13:56:11.240976 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-59c5l" event={"ID":"e5f51cc9-0787-406a-9488-30de104d9043","Type":"ContainerDied","Data":"95c367fdf3ae845cef2e8a6513d80d93563ab8024ec7937ca3119cdcf0852f4c"} Oct 03 13:56:12 crc kubenswrapper[4861]: I1003 13:56:12.252573 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-59c5l" event={"ID":"e5f51cc9-0787-406a-9488-30de104d9043","Type":"ContainerStarted","Data":"8dadb41dbb7cd3cc1c5020677b6b5198eef1163fe42c0c22da9d09335851ca55"} Oct 03 13:56:12 crc kubenswrapper[4861]: I1003 13:56:12.277068 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-59c5l" podStartSLOduration=1.747624115 podStartE2EDuration="5.277048534s" podCreationTimestamp="2025-10-03 13:56:07 +0000 UTC" firstStartedPulling="2025-10-03 13:56:08.197178455 +0000 UTC m=+1482.195163502" lastFinishedPulling="2025-10-03 13:56:11.726602864 +0000 UTC m=+1485.724587921" observedRunningTime="2025-10-03 13:56:12.273398447 +0000 UTC m=+1486.271383514" watchObservedRunningTime="2025-10-03 13:56:12.277048534 +0000 UTC m=+1486.275033581" Oct 03 13:56:16 crc kubenswrapper[4861]: I1003 13:56:16.303305 4861 generic.go:334] "Generic (PLEG): container finished" 
podID="bb1b5aa4-44c6-475c-8995-ac100260ce29" containerID="65608e0fd1727d96cda7e9015644cc260928045bcfa639b2e169208695ff158e" exitCode=0 Oct 03 13:56:16 crc kubenswrapper[4861]: I1003 13:56:16.303451 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jjs67" event={"ID":"bb1b5aa4-44c6-475c-8995-ac100260ce29","Type":"ContainerDied","Data":"65608e0fd1727d96cda7e9015644cc260928045bcfa639b2e169208695ff158e"} Oct 03 13:56:17 crc kubenswrapper[4861]: I1003 13:56:17.440123 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-59c5l" Oct 03 13:56:17 crc kubenswrapper[4861]: I1003 13:56:17.440338 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-59c5l" Oct 03 13:56:17 crc kubenswrapper[4861]: I1003 13:56:17.505119 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-59c5l" Oct 03 13:56:17 crc kubenswrapper[4861]: I1003 13:56:17.751902 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jjs67" Oct 03 13:56:17 crc kubenswrapper[4861]: I1003 13:56:17.813211 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/bb1b5aa4-44c6-475c-8995-ac100260ce29-ssh-key\") pod \"bb1b5aa4-44c6-475c-8995-ac100260ce29\" (UID: \"bb1b5aa4-44c6-475c-8995-ac100260ce29\") " Oct 03 13:56:17 crc kubenswrapper[4861]: I1003 13:56:17.813354 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bb1b5aa4-44c6-475c-8995-ac100260ce29-repo-setup-combined-ca-bundle\") pod \"bb1b5aa4-44c6-475c-8995-ac100260ce29\" (UID: \"bb1b5aa4-44c6-475c-8995-ac100260ce29\") " Oct 03 13:56:17 crc kubenswrapper[4861]: I1003 13:56:17.813462 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/bb1b5aa4-44c6-475c-8995-ac100260ce29-inventory\") pod \"bb1b5aa4-44c6-475c-8995-ac100260ce29\" (UID: \"bb1b5aa4-44c6-475c-8995-ac100260ce29\") " Oct 03 13:56:17 crc kubenswrapper[4861]: I1003 13:56:17.813702 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bxzf5\" (UniqueName: \"kubernetes.io/projected/bb1b5aa4-44c6-475c-8995-ac100260ce29-kube-api-access-bxzf5\") pod \"bb1b5aa4-44c6-475c-8995-ac100260ce29\" (UID: \"bb1b5aa4-44c6-475c-8995-ac100260ce29\") " Oct 03 13:56:17 crc kubenswrapper[4861]: I1003 13:56:17.820583 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bb1b5aa4-44c6-475c-8995-ac100260ce29-kube-api-access-bxzf5" (OuterVolumeSpecName: "kube-api-access-bxzf5") pod "bb1b5aa4-44c6-475c-8995-ac100260ce29" (UID: "bb1b5aa4-44c6-475c-8995-ac100260ce29"). InnerVolumeSpecName "kube-api-access-bxzf5". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:56:17 crc kubenswrapper[4861]: I1003 13:56:17.821140 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bb1b5aa4-44c6-475c-8995-ac100260ce29-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "bb1b5aa4-44c6-475c-8995-ac100260ce29" (UID: "bb1b5aa4-44c6-475c-8995-ac100260ce29"). 
InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:56:17 crc kubenswrapper[4861]: I1003 13:56:17.842889 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bb1b5aa4-44c6-475c-8995-ac100260ce29-inventory" (OuterVolumeSpecName: "inventory") pod "bb1b5aa4-44c6-475c-8995-ac100260ce29" (UID: "bb1b5aa4-44c6-475c-8995-ac100260ce29"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:56:17 crc kubenswrapper[4861]: I1003 13:56:17.846145 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bb1b5aa4-44c6-475c-8995-ac100260ce29-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "bb1b5aa4-44c6-475c-8995-ac100260ce29" (UID: "bb1b5aa4-44c6-475c-8995-ac100260ce29"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:56:17 crc kubenswrapper[4861]: I1003 13:56:17.918471 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bxzf5\" (UniqueName: \"kubernetes.io/projected/bb1b5aa4-44c6-475c-8995-ac100260ce29-kube-api-access-bxzf5\") on node \"crc\" DevicePath \"\"" Oct 03 13:56:17 crc kubenswrapper[4861]: I1003 13:56:17.918540 4861 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/bb1b5aa4-44c6-475c-8995-ac100260ce29-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 03 13:56:17 crc kubenswrapper[4861]: I1003 13:56:17.918555 4861 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bb1b5aa4-44c6-475c-8995-ac100260ce29-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 13:56:17 crc kubenswrapper[4861]: I1003 13:56:17.918568 4861 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/bb1b5aa4-44c6-475c-8995-ac100260ce29-inventory\") on node \"crc\" DevicePath \"\"" Oct 03 13:56:18 crc kubenswrapper[4861]: I1003 13:56:18.322672 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jjs67" event={"ID":"bb1b5aa4-44c6-475c-8995-ac100260ce29","Type":"ContainerDied","Data":"afadeed08e6ed309e9ec6e7ddbc419b97564023cf5e3609bc7cabb0a02748096"} Oct 03 13:56:18 crc kubenswrapper[4861]: I1003 13:56:18.322998 4861 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="afadeed08e6ed309e9ec6e7ddbc419b97564023cf5e3609bc7cabb0a02748096" Oct 03 13:56:18 crc kubenswrapper[4861]: I1003 13:56:18.322774 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jjs67" Oct 03 13:56:18 crc kubenswrapper[4861]: I1003 13:56:18.378928 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-59c5l" Oct 03 13:56:18 crc kubenswrapper[4861]: I1003 13:56:18.413968 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-js7xl"] Oct 03 13:56:18 crc kubenswrapper[4861]: E1003 13:56:18.414355 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bb1b5aa4-44c6-475c-8995-ac100260ce29" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Oct 03 13:56:18 crc kubenswrapper[4861]: I1003 13:56:18.414373 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="bb1b5aa4-44c6-475c-8995-ac100260ce29" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Oct 03 13:56:18 crc kubenswrapper[4861]: I1003 13:56:18.414636 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="bb1b5aa4-44c6-475c-8995-ac100260ce29" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Oct 03 13:56:18 crc kubenswrapper[4861]: I1003 13:56:18.415658 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-js7xl" Oct 03 13:56:18 crc kubenswrapper[4861]: I1003 13:56:18.417966 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 03 13:56:18 crc kubenswrapper[4861]: I1003 13:56:18.418352 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 03 13:56:18 crc kubenswrapper[4861]: I1003 13:56:18.418514 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-bd7xg" Oct 03 13:56:18 crc kubenswrapper[4861]: I1003 13:56:18.420581 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 03 13:56:18 crc kubenswrapper[4861]: I1003 13:56:18.455651 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-js7xl"] Oct 03 13:56:18 crc kubenswrapper[4861]: I1003 13:56:18.476064 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-59c5l"] Oct 03 13:56:18 crc kubenswrapper[4861]: I1003 13:56:18.527630 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a5b6d421-13d1-4c5b-b244-087790b16c8b-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-js7xl\" (UID: \"a5b6d421-13d1-4c5b-b244-087790b16c8b\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-js7xl" Oct 03 13:56:18 crc kubenswrapper[4861]: I1003 13:56:18.527694 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-glpfs\" (UniqueName: \"kubernetes.io/projected/a5b6d421-13d1-4c5b-b244-087790b16c8b-kube-api-access-glpfs\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-js7xl\" (UID: \"a5b6d421-13d1-4c5b-b244-087790b16c8b\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-js7xl" Oct 03 13:56:18 crc kubenswrapper[4861]: I1003 13:56:18.527716 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: 
\"kubernetes.io/secret/a5b6d421-13d1-4c5b-b244-087790b16c8b-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-js7xl\" (UID: \"a5b6d421-13d1-4c5b-b244-087790b16c8b\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-js7xl" Oct 03 13:56:18 crc kubenswrapper[4861]: I1003 13:56:18.630170 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a5b6d421-13d1-4c5b-b244-087790b16c8b-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-js7xl\" (UID: \"a5b6d421-13d1-4c5b-b244-087790b16c8b\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-js7xl" Oct 03 13:56:18 crc kubenswrapper[4861]: I1003 13:56:18.630256 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-glpfs\" (UniqueName: \"kubernetes.io/projected/a5b6d421-13d1-4c5b-b244-087790b16c8b-kube-api-access-glpfs\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-js7xl\" (UID: \"a5b6d421-13d1-4c5b-b244-087790b16c8b\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-js7xl" Oct 03 13:56:18 crc kubenswrapper[4861]: I1003 13:56:18.630308 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a5b6d421-13d1-4c5b-b244-087790b16c8b-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-js7xl\" (UID: \"a5b6d421-13d1-4c5b-b244-087790b16c8b\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-js7xl" Oct 03 13:56:18 crc kubenswrapper[4861]: I1003 13:56:18.634757 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a5b6d421-13d1-4c5b-b244-087790b16c8b-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-js7xl\" (UID: \"a5b6d421-13d1-4c5b-b244-087790b16c8b\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-js7xl" Oct 03 13:56:18 crc kubenswrapper[4861]: I1003 13:56:18.634904 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a5b6d421-13d1-4c5b-b244-087790b16c8b-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-js7xl\" (UID: \"a5b6d421-13d1-4c5b-b244-087790b16c8b\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-js7xl" Oct 03 13:56:18 crc kubenswrapper[4861]: I1003 13:56:18.647456 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-glpfs\" (UniqueName: \"kubernetes.io/projected/a5b6d421-13d1-4c5b-b244-087790b16c8b-kube-api-access-glpfs\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-js7xl\" (UID: \"a5b6d421-13d1-4c5b-b244-087790b16c8b\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-js7xl" Oct 03 13:56:18 crc kubenswrapper[4861]: I1003 13:56:18.731931 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-js7xl" Oct 03 13:56:19 crc kubenswrapper[4861]: I1003 13:56:19.244436 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-js7xl"] Oct 03 13:56:19 crc kubenswrapper[4861]: W1003 13:56:19.250319 4861 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda5b6d421_13d1_4c5b_b244_087790b16c8b.slice/crio-a7ee93ae6f0a258ace6490735abb32db2e78ca6806127ced3be20b9c2c9a5e46 WatchSource:0}: Error finding container a7ee93ae6f0a258ace6490735abb32db2e78ca6806127ced3be20b9c2c9a5e46: Status 404 returned error can't find the container with id a7ee93ae6f0a258ace6490735abb32db2e78ca6806127ced3be20b9c2c9a5e46 Oct 03 13:56:19 crc kubenswrapper[4861]: I1003 13:56:19.332116 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-js7xl" event={"ID":"a5b6d421-13d1-4c5b-b244-087790b16c8b","Type":"ContainerStarted","Data":"a7ee93ae6f0a258ace6490735abb32db2e78ca6806127ced3be20b9c2c9a5e46"} Oct 03 13:56:20 crc kubenswrapper[4861]: I1003 13:56:20.346626 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-59c5l" podUID="e5f51cc9-0787-406a-9488-30de104d9043" containerName="registry-server" containerID="cri-o://8dadb41dbb7cd3cc1c5020677b6b5198eef1163fe42c0c22da9d09335851ca55" gracePeriod=2 Oct 03 13:56:20 crc kubenswrapper[4861]: I1003 13:56:20.347463 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-js7xl" event={"ID":"a5b6d421-13d1-4c5b-b244-087790b16c8b","Type":"ContainerStarted","Data":"00c3bdc41478bb368e37dc7e922d73b6515b22d5d9dbabbba31e1ab5261ac3ec"} Oct 03 13:56:20 crc kubenswrapper[4861]: I1003 13:56:20.375609 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-js7xl" podStartSLOduration=1.933846653 podStartE2EDuration="2.375537214s" podCreationTimestamp="2025-10-03 13:56:18 +0000 UTC" firstStartedPulling="2025-10-03 13:56:19.259087986 +0000 UTC m=+1493.257073033" lastFinishedPulling="2025-10-03 13:56:19.700778547 +0000 UTC m=+1493.698763594" observedRunningTime="2025-10-03 13:56:20.364436668 +0000 UTC m=+1494.362421785" watchObservedRunningTime="2025-10-03 13:56:20.375537214 +0000 UTC m=+1494.373522301" Oct 03 13:56:21 crc kubenswrapper[4861]: I1003 13:56:21.361492 4861 generic.go:334] "Generic (PLEG): container finished" podID="e5f51cc9-0787-406a-9488-30de104d9043" containerID="8dadb41dbb7cd3cc1c5020677b6b5198eef1163fe42c0c22da9d09335851ca55" exitCode=0 Oct 03 13:56:21 crc kubenswrapper[4861]: I1003 13:56:21.361540 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-59c5l" event={"ID":"e5f51cc9-0787-406a-9488-30de104d9043","Type":"ContainerDied","Data":"8dadb41dbb7cd3cc1c5020677b6b5198eef1163fe42c0c22da9d09335851ca55"} Oct 03 13:56:21 crc kubenswrapper[4861]: I1003 13:56:21.361817 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-59c5l" event={"ID":"e5f51cc9-0787-406a-9488-30de104d9043","Type":"ContainerDied","Data":"7024f915799e629a349dc2ba45acf20ca29dd514c7613d87be35d737793e7c76"} Oct 03 13:56:21 crc kubenswrapper[4861]: I1003 13:56:21.361835 4861 pod_container_deletor.go:80] "Container not found in pod's containers" 
containerID="7024f915799e629a349dc2ba45acf20ca29dd514c7613d87be35d737793e7c76" Oct 03 13:56:21 crc kubenswrapper[4861]: I1003 13:56:21.381349 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-59c5l" Oct 03 13:56:21 crc kubenswrapper[4861]: I1003 13:56:21.482810 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e5f51cc9-0787-406a-9488-30de104d9043-catalog-content\") pod \"e5f51cc9-0787-406a-9488-30de104d9043\" (UID: \"e5f51cc9-0787-406a-9488-30de104d9043\") " Oct 03 13:56:21 crc kubenswrapper[4861]: I1003 13:56:21.482983 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e5f51cc9-0787-406a-9488-30de104d9043-utilities\") pod \"e5f51cc9-0787-406a-9488-30de104d9043\" (UID: \"e5f51cc9-0787-406a-9488-30de104d9043\") " Oct 03 13:56:21 crc kubenswrapper[4861]: I1003 13:56:21.483055 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rpqk2\" (UniqueName: \"kubernetes.io/projected/e5f51cc9-0787-406a-9488-30de104d9043-kube-api-access-rpqk2\") pod \"e5f51cc9-0787-406a-9488-30de104d9043\" (UID: \"e5f51cc9-0787-406a-9488-30de104d9043\") " Oct 03 13:56:21 crc kubenswrapper[4861]: I1003 13:56:21.486277 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e5f51cc9-0787-406a-9488-30de104d9043-utilities" (OuterVolumeSpecName: "utilities") pod "e5f51cc9-0787-406a-9488-30de104d9043" (UID: "e5f51cc9-0787-406a-9488-30de104d9043"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:56:21 crc kubenswrapper[4861]: I1003 13:56:21.491554 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e5f51cc9-0787-406a-9488-30de104d9043-kube-api-access-rpqk2" (OuterVolumeSpecName: "kube-api-access-rpqk2") pod "e5f51cc9-0787-406a-9488-30de104d9043" (UID: "e5f51cc9-0787-406a-9488-30de104d9043"). InnerVolumeSpecName "kube-api-access-rpqk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:56:21 crc kubenswrapper[4861]: I1003 13:56:21.538297 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e5f51cc9-0787-406a-9488-30de104d9043-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e5f51cc9-0787-406a-9488-30de104d9043" (UID: "e5f51cc9-0787-406a-9488-30de104d9043"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:56:21 crc kubenswrapper[4861]: I1003 13:56:21.585859 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rpqk2\" (UniqueName: \"kubernetes.io/projected/e5f51cc9-0787-406a-9488-30de104d9043-kube-api-access-rpqk2\") on node \"crc\" DevicePath \"\"" Oct 03 13:56:21 crc kubenswrapper[4861]: I1003 13:56:21.585890 4861 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e5f51cc9-0787-406a-9488-30de104d9043-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 13:56:21 crc kubenswrapper[4861]: I1003 13:56:21.585899 4861 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e5f51cc9-0787-406a-9488-30de104d9043-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 13:56:22 crc kubenswrapper[4861]: I1003 13:56:22.369713 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-59c5l" Oct 03 13:56:22 crc kubenswrapper[4861]: I1003 13:56:22.405018 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-59c5l"] Oct 03 13:56:22 crc kubenswrapper[4861]: I1003 13:56:22.414174 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-59c5l"] Oct 03 13:56:22 crc kubenswrapper[4861]: I1003 13:56:22.691139 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e5f51cc9-0787-406a-9488-30de104d9043" path="/var/lib/kubelet/pods/e5f51cc9-0787-406a-9488-30de104d9043/volumes" Oct 03 13:56:23 crc kubenswrapper[4861]: I1003 13:56:23.378350 4861 generic.go:334] "Generic (PLEG): container finished" podID="a5b6d421-13d1-4c5b-b244-087790b16c8b" containerID="00c3bdc41478bb368e37dc7e922d73b6515b22d5d9dbabbba31e1ab5261ac3ec" exitCode=0 Oct 03 13:56:23 crc kubenswrapper[4861]: I1003 13:56:23.378423 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-js7xl" event={"ID":"a5b6d421-13d1-4c5b-b244-087790b16c8b","Type":"ContainerDied","Data":"00c3bdc41478bb368e37dc7e922d73b6515b22d5d9dbabbba31e1ab5261ac3ec"} Oct 03 13:56:24 crc kubenswrapper[4861]: I1003 13:56:24.826457 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-js7xl" Oct 03 13:56:24 crc kubenswrapper[4861]: I1003 13:56:24.944692 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a5b6d421-13d1-4c5b-b244-087790b16c8b-ssh-key\") pod \"a5b6d421-13d1-4c5b-b244-087790b16c8b\" (UID: \"a5b6d421-13d1-4c5b-b244-087790b16c8b\") " Oct 03 13:56:24 crc kubenswrapper[4861]: I1003 13:56:24.944971 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-glpfs\" (UniqueName: \"kubernetes.io/projected/a5b6d421-13d1-4c5b-b244-087790b16c8b-kube-api-access-glpfs\") pod \"a5b6d421-13d1-4c5b-b244-087790b16c8b\" (UID: \"a5b6d421-13d1-4c5b-b244-087790b16c8b\") " Oct 03 13:56:24 crc kubenswrapper[4861]: I1003 13:56:24.945344 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a5b6d421-13d1-4c5b-b244-087790b16c8b-inventory\") pod \"a5b6d421-13d1-4c5b-b244-087790b16c8b\" (UID: \"a5b6d421-13d1-4c5b-b244-087790b16c8b\") " Oct 03 13:56:24 crc kubenswrapper[4861]: I1003 13:56:24.952540 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a5b6d421-13d1-4c5b-b244-087790b16c8b-kube-api-access-glpfs" (OuterVolumeSpecName: "kube-api-access-glpfs") pod "a5b6d421-13d1-4c5b-b244-087790b16c8b" (UID: "a5b6d421-13d1-4c5b-b244-087790b16c8b"). InnerVolumeSpecName "kube-api-access-glpfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:56:24 crc kubenswrapper[4861]: I1003 13:56:24.972667 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a5b6d421-13d1-4c5b-b244-087790b16c8b-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "a5b6d421-13d1-4c5b-b244-087790b16c8b" (UID: "a5b6d421-13d1-4c5b-b244-087790b16c8b"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:56:24 crc kubenswrapper[4861]: I1003 13:56:24.972689 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a5b6d421-13d1-4c5b-b244-087790b16c8b-inventory" (OuterVolumeSpecName: "inventory") pod "a5b6d421-13d1-4c5b-b244-087790b16c8b" (UID: "a5b6d421-13d1-4c5b-b244-087790b16c8b"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:56:25 crc kubenswrapper[4861]: I1003 13:56:25.048151 4861 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a5b6d421-13d1-4c5b-b244-087790b16c8b-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 03 13:56:25 crc kubenswrapper[4861]: I1003 13:56:25.048193 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-glpfs\" (UniqueName: \"kubernetes.io/projected/a5b6d421-13d1-4c5b-b244-087790b16c8b-kube-api-access-glpfs\") on node \"crc\" DevicePath \"\"" Oct 03 13:56:25 crc kubenswrapper[4861]: I1003 13:56:25.048243 4861 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a5b6d421-13d1-4c5b-b244-087790b16c8b-inventory\") on node \"crc\" DevicePath \"\"" Oct 03 13:56:25 crc kubenswrapper[4861]: I1003 13:56:25.399686 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-js7xl" event={"ID":"a5b6d421-13d1-4c5b-b244-087790b16c8b","Type":"ContainerDied","Data":"a7ee93ae6f0a258ace6490735abb32db2e78ca6806127ced3be20b9c2c9a5e46"} Oct 03 13:56:25 crc kubenswrapper[4861]: I1003 13:56:25.399728 4861 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a7ee93ae6f0a258ace6490735abb32db2e78ca6806127ced3be20b9c2c9a5e46" Oct 03 13:56:25 crc kubenswrapper[4861]: I1003 13:56:25.399737 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-js7xl" Oct 03 13:56:25 crc kubenswrapper[4861]: I1003 13:56:25.482056 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-8zh9n"] Oct 03 13:56:25 crc kubenswrapper[4861]: E1003 13:56:25.482480 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e5f51cc9-0787-406a-9488-30de104d9043" containerName="extract-content" Oct 03 13:56:25 crc kubenswrapper[4861]: I1003 13:56:25.482497 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="e5f51cc9-0787-406a-9488-30de104d9043" containerName="extract-content" Oct 03 13:56:25 crc kubenswrapper[4861]: E1003 13:56:25.482512 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e5f51cc9-0787-406a-9488-30de104d9043" containerName="registry-server" Oct 03 13:56:25 crc kubenswrapper[4861]: I1003 13:56:25.482518 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="e5f51cc9-0787-406a-9488-30de104d9043" containerName="registry-server" Oct 03 13:56:25 crc kubenswrapper[4861]: E1003 13:56:25.482530 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a5b6d421-13d1-4c5b-b244-087790b16c8b" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Oct 03 13:56:25 crc kubenswrapper[4861]: I1003 13:56:25.482537 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="a5b6d421-13d1-4c5b-b244-087790b16c8b" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Oct 03 13:56:25 crc kubenswrapper[4861]: E1003 13:56:25.482555 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e5f51cc9-0787-406a-9488-30de104d9043" containerName="extract-utilities" Oct 03 13:56:25 crc kubenswrapper[4861]: I1003 13:56:25.482561 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="e5f51cc9-0787-406a-9488-30de104d9043" containerName="extract-utilities" Oct 03 13:56:25 crc kubenswrapper[4861]: I1003 13:56:25.482744 4861 memory_manager.go:354] "RemoveStaleState removing 
state" podUID="e5f51cc9-0787-406a-9488-30de104d9043" containerName="registry-server" Oct 03 13:56:25 crc kubenswrapper[4861]: I1003 13:56:25.482765 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="a5b6d421-13d1-4c5b-b244-087790b16c8b" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Oct 03 13:56:25 crc kubenswrapper[4861]: I1003 13:56:25.483498 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-8zh9n" Oct 03 13:56:25 crc kubenswrapper[4861]: I1003 13:56:25.488750 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 03 13:56:25 crc kubenswrapper[4861]: I1003 13:56:25.488989 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 03 13:56:25 crc kubenswrapper[4861]: I1003 13:56:25.489184 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-bd7xg" Oct 03 13:56:25 crc kubenswrapper[4861]: I1003 13:56:25.489339 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 03 13:56:25 crc kubenswrapper[4861]: I1003 13:56:25.496174 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-8zh9n"] Oct 03 13:56:25 crc kubenswrapper[4861]: I1003 13:56:25.556708 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/88cfdf25-6cf6-4553-a95b-d49e13d2f509-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-8zh9n\" (UID: \"88cfdf25-6cf6-4553-a95b-d49e13d2f509\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-8zh9n" Oct 03 13:56:25 crc kubenswrapper[4861]: I1003 13:56:25.556758 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/88cfdf25-6cf6-4553-a95b-d49e13d2f509-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-8zh9n\" (UID: \"88cfdf25-6cf6-4553-a95b-d49e13d2f509\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-8zh9n" Oct 03 13:56:25 crc kubenswrapper[4861]: I1003 13:56:25.556822 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qrgvd\" (UniqueName: \"kubernetes.io/projected/88cfdf25-6cf6-4553-a95b-d49e13d2f509-kube-api-access-qrgvd\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-8zh9n\" (UID: \"88cfdf25-6cf6-4553-a95b-d49e13d2f509\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-8zh9n" Oct 03 13:56:25 crc kubenswrapper[4861]: I1003 13:56:25.556876 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/88cfdf25-6cf6-4553-a95b-d49e13d2f509-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-8zh9n\" (UID: \"88cfdf25-6cf6-4553-a95b-d49e13d2f509\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-8zh9n" Oct 03 13:56:25 crc kubenswrapper[4861]: I1003 13:56:25.658560 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qrgvd\" (UniqueName: \"kubernetes.io/projected/88cfdf25-6cf6-4553-a95b-d49e13d2f509-kube-api-access-qrgvd\") pod 
\"bootstrap-edpm-deployment-openstack-edpm-ipam-8zh9n\" (UID: \"88cfdf25-6cf6-4553-a95b-d49e13d2f509\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-8zh9n" Oct 03 13:56:25 crc kubenswrapper[4861]: I1003 13:56:25.658684 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/88cfdf25-6cf6-4553-a95b-d49e13d2f509-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-8zh9n\" (UID: \"88cfdf25-6cf6-4553-a95b-d49e13d2f509\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-8zh9n" Oct 03 13:56:25 crc kubenswrapper[4861]: I1003 13:56:25.658769 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/88cfdf25-6cf6-4553-a95b-d49e13d2f509-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-8zh9n\" (UID: \"88cfdf25-6cf6-4553-a95b-d49e13d2f509\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-8zh9n" Oct 03 13:56:25 crc kubenswrapper[4861]: I1003 13:56:25.658812 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/88cfdf25-6cf6-4553-a95b-d49e13d2f509-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-8zh9n\" (UID: \"88cfdf25-6cf6-4553-a95b-d49e13d2f509\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-8zh9n" Oct 03 13:56:25 crc kubenswrapper[4861]: I1003 13:56:25.665328 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/88cfdf25-6cf6-4553-a95b-d49e13d2f509-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-8zh9n\" (UID: \"88cfdf25-6cf6-4553-a95b-d49e13d2f509\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-8zh9n" Oct 03 13:56:25 crc kubenswrapper[4861]: I1003 13:56:25.668776 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/88cfdf25-6cf6-4553-a95b-d49e13d2f509-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-8zh9n\" (UID: \"88cfdf25-6cf6-4553-a95b-d49e13d2f509\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-8zh9n" Oct 03 13:56:25 crc kubenswrapper[4861]: I1003 13:56:25.674269 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/88cfdf25-6cf6-4553-a95b-d49e13d2f509-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-8zh9n\" (UID: \"88cfdf25-6cf6-4553-a95b-d49e13d2f509\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-8zh9n" Oct 03 13:56:25 crc kubenswrapper[4861]: I1003 13:56:25.683125 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qrgvd\" (UniqueName: \"kubernetes.io/projected/88cfdf25-6cf6-4553-a95b-d49e13d2f509-kube-api-access-qrgvd\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-8zh9n\" (UID: \"88cfdf25-6cf6-4553-a95b-d49e13d2f509\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-8zh9n" Oct 03 13:56:25 crc kubenswrapper[4861]: I1003 13:56:25.804734 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-8zh9n" Oct 03 13:56:26 crc kubenswrapper[4861]: I1003 13:56:26.326852 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-8zh9n"] Oct 03 13:56:26 crc kubenswrapper[4861]: I1003 13:56:26.410882 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-8zh9n" event={"ID":"88cfdf25-6cf6-4553-a95b-d49e13d2f509","Type":"ContainerStarted","Data":"8e80c87361fe5d79c441a1673f5761d3669d412b13265d75d44801f0579bf8de"} Oct 03 13:56:27 crc kubenswrapper[4861]: I1003 13:56:27.423072 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-8zh9n" event={"ID":"88cfdf25-6cf6-4553-a95b-d49e13d2f509","Type":"ContainerStarted","Data":"65c11677b2cc71ab0fc089da1ab632d375df879d98d3555ec572d8791edc2b74"} Oct 03 13:56:27 crc kubenswrapper[4861]: I1003 13:56:27.445393 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-8zh9n" podStartSLOduration=2.2715305519999998 podStartE2EDuration="2.445372949s" podCreationTimestamp="2025-10-03 13:56:25 +0000 UTC" firstStartedPulling="2025-10-03 13:56:26.353292431 +0000 UTC m=+1500.351277478" lastFinishedPulling="2025-10-03 13:56:26.527134828 +0000 UTC m=+1500.525119875" observedRunningTime="2025-10-03 13:56:27.440370275 +0000 UTC m=+1501.438355322" watchObservedRunningTime="2025-10-03 13:56:27.445372949 +0000 UTC m=+1501.443358006" Oct 03 13:56:30 crc kubenswrapper[4861]: I1003 13:56:30.145042 4861 patch_prober.go:28] interesting pod/machine-config-daemon-t9slw container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 13:56:30 crc kubenswrapper[4861]: I1003 13:56:30.145542 4861 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 13:56:56 crc kubenswrapper[4861]: I1003 13:56:56.481682 4861 scope.go:117] "RemoveContainer" containerID="932053692d9d01408cca63828f1bcc734486fa7c9accdebe01347dd82fbc2416" Oct 03 13:57:00 crc kubenswrapper[4861]: I1003 13:57:00.145214 4861 patch_prober.go:28] interesting pod/machine-config-daemon-t9slw container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 13:57:00 crc kubenswrapper[4861]: I1003 13:57:00.145734 4861 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 13:57:00 crc kubenswrapper[4861]: I1003 13:57:00.145777 4861 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" Oct 03 13:57:00 crc kubenswrapper[4861]: I1003 
13:57:00.146434 4861 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"c1e256e5753a41524e35adb39d95dd19fa1e46e14c50fc9dfd4aef05090aab1c"} pod="openshift-machine-config-operator/machine-config-daemon-t9slw" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 03 13:57:00 crc kubenswrapper[4861]: I1003 13:57:00.146477 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" containerName="machine-config-daemon" containerID="cri-o://c1e256e5753a41524e35adb39d95dd19fa1e46e14c50fc9dfd4aef05090aab1c" gracePeriod=600 Oct 03 13:57:00 crc kubenswrapper[4861]: E1003 13:57:00.267686 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 13:57:00 crc kubenswrapper[4861]: I1003 13:57:00.716600 4861 generic.go:334] "Generic (PLEG): container finished" podID="d8335d3f-417e-4114-b306-a3d8f6c31348" containerID="c1e256e5753a41524e35adb39d95dd19fa1e46e14c50fc9dfd4aef05090aab1c" exitCode=0 Oct 03 13:57:00 crc kubenswrapper[4861]: I1003 13:57:00.716858 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" event={"ID":"d8335d3f-417e-4114-b306-a3d8f6c31348","Type":"ContainerDied","Data":"c1e256e5753a41524e35adb39d95dd19fa1e46e14c50fc9dfd4aef05090aab1c"} Oct 03 13:57:00 crc kubenswrapper[4861]: I1003 13:57:00.716957 4861 scope.go:117] "RemoveContainer" containerID="13a0d25a9a90da6fc94ead2cdfeed1d7dc6194708455cf7dc135deca83d68d28" Oct 03 13:57:00 crc kubenswrapper[4861]: I1003 13:57:00.717344 4861 scope.go:117] "RemoveContainer" containerID="c1e256e5753a41524e35adb39d95dd19fa1e46e14c50fc9dfd4aef05090aab1c" Oct 03 13:57:00 crc kubenswrapper[4861]: E1003 13:57:00.717620 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 13:57:12 crc kubenswrapper[4861]: I1003 13:57:12.682636 4861 scope.go:117] "RemoveContainer" containerID="c1e256e5753a41524e35adb39d95dd19fa1e46e14c50fc9dfd4aef05090aab1c" Oct 03 13:57:12 crc kubenswrapper[4861]: E1003 13:57:12.683736 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 13:57:27 crc kubenswrapper[4861]: I1003 13:57:27.681564 4861 scope.go:117] "RemoveContainer" 
containerID="c1e256e5753a41524e35adb39d95dd19fa1e46e14c50fc9dfd4aef05090aab1c" Oct 03 13:57:27 crc kubenswrapper[4861]: E1003 13:57:27.682172 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 13:57:34 crc kubenswrapper[4861]: I1003 13:57:34.981616 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-5nfwx"] Oct 03 13:57:34 crc kubenswrapper[4861]: I1003 13:57:34.988759 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5nfwx" Oct 03 13:57:34 crc kubenswrapper[4861]: I1003 13:57:34.996594 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-5nfwx"] Oct 03 13:57:35 crc kubenswrapper[4861]: I1003 13:57:35.150708 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ed298cda-8891-48eb-af45-fd8746c8ab19-utilities\") pod \"redhat-marketplace-5nfwx\" (UID: \"ed298cda-8891-48eb-af45-fd8746c8ab19\") " pod="openshift-marketplace/redhat-marketplace-5nfwx" Oct 03 13:57:35 crc kubenswrapper[4861]: I1003 13:57:35.150817 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ed298cda-8891-48eb-af45-fd8746c8ab19-catalog-content\") pod \"redhat-marketplace-5nfwx\" (UID: \"ed298cda-8891-48eb-af45-fd8746c8ab19\") " pod="openshift-marketplace/redhat-marketplace-5nfwx" Oct 03 13:57:35 crc kubenswrapper[4861]: I1003 13:57:35.150901 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r2zvm\" (UniqueName: \"kubernetes.io/projected/ed298cda-8891-48eb-af45-fd8746c8ab19-kube-api-access-r2zvm\") pod \"redhat-marketplace-5nfwx\" (UID: \"ed298cda-8891-48eb-af45-fd8746c8ab19\") " pod="openshift-marketplace/redhat-marketplace-5nfwx" Oct 03 13:57:35 crc kubenswrapper[4861]: I1003 13:57:35.253259 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r2zvm\" (UniqueName: \"kubernetes.io/projected/ed298cda-8891-48eb-af45-fd8746c8ab19-kube-api-access-r2zvm\") pod \"redhat-marketplace-5nfwx\" (UID: \"ed298cda-8891-48eb-af45-fd8746c8ab19\") " pod="openshift-marketplace/redhat-marketplace-5nfwx" Oct 03 13:57:35 crc kubenswrapper[4861]: I1003 13:57:35.253414 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ed298cda-8891-48eb-af45-fd8746c8ab19-utilities\") pod \"redhat-marketplace-5nfwx\" (UID: \"ed298cda-8891-48eb-af45-fd8746c8ab19\") " pod="openshift-marketplace/redhat-marketplace-5nfwx" Oct 03 13:57:35 crc kubenswrapper[4861]: I1003 13:57:35.253544 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ed298cda-8891-48eb-af45-fd8746c8ab19-catalog-content\") pod \"redhat-marketplace-5nfwx\" (UID: \"ed298cda-8891-48eb-af45-fd8746c8ab19\") " pod="openshift-marketplace/redhat-marketplace-5nfwx" Oct 03 13:57:35 crc 
kubenswrapper[4861]: I1003 13:57:35.254543 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ed298cda-8891-48eb-af45-fd8746c8ab19-catalog-content\") pod \"redhat-marketplace-5nfwx\" (UID: \"ed298cda-8891-48eb-af45-fd8746c8ab19\") " pod="openshift-marketplace/redhat-marketplace-5nfwx" Oct 03 13:57:35 crc kubenswrapper[4861]: I1003 13:57:35.254644 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ed298cda-8891-48eb-af45-fd8746c8ab19-utilities\") pod \"redhat-marketplace-5nfwx\" (UID: \"ed298cda-8891-48eb-af45-fd8746c8ab19\") " pod="openshift-marketplace/redhat-marketplace-5nfwx" Oct 03 13:57:35 crc kubenswrapper[4861]: I1003 13:57:35.279954 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r2zvm\" (UniqueName: \"kubernetes.io/projected/ed298cda-8891-48eb-af45-fd8746c8ab19-kube-api-access-r2zvm\") pod \"redhat-marketplace-5nfwx\" (UID: \"ed298cda-8891-48eb-af45-fd8746c8ab19\") " pod="openshift-marketplace/redhat-marketplace-5nfwx" Oct 03 13:57:35 crc kubenswrapper[4861]: I1003 13:57:35.326408 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5nfwx" Oct 03 13:57:35 crc kubenswrapper[4861]: I1003 13:57:35.773011 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-5nfwx"] Oct 03 13:57:36 crc kubenswrapper[4861]: I1003 13:57:36.037962 4861 generic.go:334] "Generic (PLEG): container finished" podID="ed298cda-8891-48eb-af45-fd8746c8ab19" containerID="cc1254ec440cf83c06079b80a131c2653c4e9fe5298459d9529fd9b28542efe9" exitCode=0 Oct 03 13:57:36 crc kubenswrapper[4861]: I1003 13:57:36.039500 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5nfwx" event={"ID":"ed298cda-8891-48eb-af45-fd8746c8ab19","Type":"ContainerDied","Data":"cc1254ec440cf83c06079b80a131c2653c4e9fe5298459d9529fd9b28542efe9"} Oct 03 13:57:36 crc kubenswrapper[4861]: I1003 13:57:36.039529 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5nfwx" event={"ID":"ed298cda-8891-48eb-af45-fd8746c8ab19","Type":"ContainerStarted","Data":"fc273517bcad7a40d17bb2b0c4dde6bbd2c74515ded1ca823370292b79ebe473"} Oct 03 13:57:37 crc kubenswrapper[4861]: I1003 13:57:37.047129 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5nfwx" event={"ID":"ed298cda-8891-48eb-af45-fd8746c8ab19","Type":"ContainerStarted","Data":"1f80baccbaf4f0fe9ba0eb0ad1ffe665c93a497fb2cd4043a657017435fa8931"} Oct 03 13:57:38 crc kubenswrapper[4861]: I1003 13:57:38.055642 4861 generic.go:334] "Generic (PLEG): container finished" podID="ed298cda-8891-48eb-af45-fd8746c8ab19" containerID="1f80baccbaf4f0fe9ba0eb0ad1ffe665c93a497fb2cd4043a657017435fa8931" exitCode=0 Oct 03 13:57:38 crc kubenswrapper[4861]: I1003 13:57:38.055706 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5nfwx" event={"ID":"ed298cda-8891-48eb-af45-fd8746c8ab19","Type":"ContainerDied","Data":"1f80baccbaf4f0fe9ba0eb0ad1ffe665c93a497fb2cd4043a657017435fa8931"} Oct 03 13:57:40 crc kubenswrapper[4861]: I1003 13:57:40.079917 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5nfwx" 
event={"ID":"ed298cda-8891-48eb-af45-fd8746c8ab19","Type":"ContainerStarted","Data":"8a7daae141e28d5003431f26c03d5677a61e3f85ca1f8b62ee2b6754f6fc0feb"} Oct 03 13:57:40 crc kubenswrapper[4861]: I1003 13:57:40.108455 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-5nfwx" podStartSLOduration=3.109205471 podStartE2EDuration="6.108424723s" podCreationTimestamp="2025-10-03 13:57:34 +0000 UTC" firstStartedPulling="2025-10-03 13:57:36.039633416 +0000 UTC m=+1570.037618463" lastFinishedPulling="2025-10-03 13:57:39.038852658 +0000 UTC m=+1573.036837715" observedRunningTime="2025-10-03 13:57:40.098012736 +0000 UTC m=+1574.095997823" watchObservedRunningTime="2025-10-03 13:57:40.108424723 +0000 UTC m=+1574.106409810" Oct 03 13:57:42 crc kubenswrapper[4861]: I1003 13:57:42.682067 4861 scope.go:117] "RemoveContainer" containerID="c1e256e5753a41524e35adb39d95dd19fa1e46e14c50fc9dfd4aef05090aab1c" Oct 03 13:57:42 crc kubenswrapper[4861]: E1003 13:57:42.682571 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 13:57:45 crc kubenswrapper[4861]: I1003 13:57:45.327511 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-5nfwx" Oct 03 13:57:45 crc kubenswrapper[4861]: I1003 13:57:45.328102 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-5nfwx" Oct 03 13:57:45 crc kubenswrapper[4861]: I1003 13:57:45.388778 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-5nfwx" Oct 03 13:57:46 crc kubenswrapper[4861]: I1003 13:57:46.181637 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-5nfwx" Oct 03 13:57:46 crc kubenswrapper[4861]: I1003 13:57:46.233095 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-5nfwx"] Oct 03 13:57:48 crc kubenswrapper[4861]: I1003 13:57:48.156083 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-5nfwx" podUID="ed298cda-8891-48eb-af45-fd8746c8ab19" containerName="registry-server" containerID="cri-o://8a7daae141e28d5003431f26c03d5677a61e3f85ca1f8b62ee2b6754f6fc0feb" gracePeriod=2 Oct 03 13:57:48 crc kubenswrapper[4861]: I1003 13:57:48.624614 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5nfwx" Oct 03 13:57:48 crc kubenswrapper[4861]: I1003 13:57:48.822583 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ed298cda-8891-48eb-af45-fd8746c8ab19-utilities\") pod \"ed298cda-8891-48eb-af45-fd8746c8ab19\" (UID: \"ed298cda-8891-48eb-af45-fd8746c8ab19\") " Oct 03 13:57:48 crc kubenswrapper[4861]: I1003 13:57:48.822801 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r2zvm\" (UniqueName: \"kubernetes.io/projected/ed298cda-8891-48eb-af45-fd8746c8ab19-kube-api-access-r2zvm\") pod \"ed298cda-8891-48eb-af45-fd8746c8ab19\" (UID: \"ed298cda-8891-48eb-af45-fd8746c8ab19\") " Oct 03 13:57:48 crc kubenswrapper[4861]: I1003 13:57:48.822876 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ed298cda-8891-48eb-af45-fd8746c8ab19-catalog-content\") pod \"ed298cda-8891-48eb-af45-fd8746c8ab19\" (UID: \"ed298cda-8891-48eb-af45-fd8746c8ab19\") " Oct 03 13:57:48 crc kubenswrapper[4861]: I1003 13:57:48.823895 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ed298cda-8891-48eb-af45-fd8746c8ab19-utilities" (OuterVolumeSpecName: "utilities") pod "ed298cda-8891-48eb-af45-fd8746c8ab19" (UID: "ed298cda-8891-48eb-af45-fd8746c8ab19"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:57:48 crc kubenswrapper[4861]: I1003 13:57:48.827498 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ed298cda-8891-48eb-af45-fd8746c8ab19-kube-api-access-r2zvm" (OuterVolumeSpecName: "kube-api-access-r2zvm") pod "ed298cda-8891-48eb-af45-fd8746c8ab19" (UID: "ed298cda-8891-48eb-af45-fd8746c8ab19"). InnerVolumeSpecName "kube-api-access-r2zvm". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:57:48 crc kubenswrapper[4861]: I1003 13:57:48.834469 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ed298cda-8891-48eb-af45-fd8746c8ab19-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ed298cda-8891-48eb-af45-fd8746c8ab19" (UID: "ed298cda-8891-48eb-af45-fd8746c8ab19"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:57:48 crc kubenswrapper[4861]: I1003 13:57:48.925620 4861 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ed298cda-8891-48eb-af45-fd8746c8ab19-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 13:57:48 crc kubenswrapper[4861]: I1003 13:57:48.925677 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r2zvm\" (UniqueName: \"kubernetes.io/projected/ed298cda-8891-48eb-af45-fd8746c8ab19-kube-api-access-r2zvm\") on node \"crc\" DevicePath \"\"" Oct 03 13:57:48 crc kubenswrapper[4861]: I1003 13:57:48.925697 4861 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ed298cda-8891-48eb-af45-fd8746c8ab19-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 13:57:49 crc kubenswrapper[4861]: I1003 13:57:49.168448 4861 generic.go:334] "Generic (PLEG): container finished" podID="ed298cda-8891-48eb-af45-fd8746c8ab19" containerID="8a7daae141e28d5003431f26c03d5677a61e3f85ca1f8b62ee2b6754f6fc0feb" exitCode=0 Oct 03 13:57:49 crc kubenswrapper[4861]: I1003 13:57:49.168501 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5nfwx" event={"ID":"ed298cda-8891-48eb-af45-fd8746c8ab19","Type":"ContainerDied","Data":"8a7daae141e28d5003431f26c03d5677a61e3f85ca1f8b62ee2b6754f6fc0feb"} Oct 03 13:57:49 crc kubenswrapper[4861]: I1003 13:57:49.168565 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5nfwx" event={"ID":"ed298cda-8891-48eb-af45-fd8746c8ab19","Type":"ContainerDied","Data":"fc273517bcad7a40d17bb2b0c4dde6bbd2c74515ded1ca823370292b79ebe473"} Oct 03 13:57:49 crc kubenswrapper[4861]: I1003 13:57:49.168560 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5nfwx" Oct 03 13:57:49 crc kubenswrapper[4861]: I1003 13:57:49.168590 4861 scope.go:117] "RemoveContainer" containerID="8a7daae141e28d5003431f26c03d5677a61e3f85ca1f8b62ee2b6754f6fc0feb" Oct 03 13:57:49 crc kubenswrapper[4861]: I1003 13:57:49.216492 4861 scope.go:117] "RemoveContainer" containerID="1f80baccbaf4f0fe9ba0eb0ad1ffe665c93a497fb2cd4043a657017435fa8931" Oct 03 13:57:49 crc kubenswrapper[4861]: I1003 13:57:49.219021 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-5nfwx"] Oct 03 13:57:49 crc kubenswrapper[4861]: I1003 13:57:49.234703 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-5nfwx"] Oct 03 13:57:49 crc kubenswrapper[4861]: I1003 13:57:49.240074 4861 scope.go:117] "RemoveContainer" containerID="cc1254ec440cf83c06079b80a131c2653c4e9fe5298459d9529fd9b28542efe9" Oct 03 13:57:49 crc kubenswrapper[4861]: I1003 13:57:49.310360 4861 scope.go:117] "RemoveContainer" containerID="8a7daae141e28d5003431f26c03d5677a61e3f85ca1f8b62ee2b6754f6fc0feb" Oct 03 13:57:49 crc kubenswrapper[4861]: E1003 13:57:49.310928 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8a7daae141e28d5003431f26c03d5677a61e3f85ca1f8b62ee2b6754f6fc0feb\": container with ID starting with 8a7daae141e28d5003431f26c03d5677a61e3f85ca1f8b62ee2b6754f6fc0feb not found: ID does not exist" containerID="8a7daae141e28d5003431f26c03d5677a61e3f85ca1f8b62ee2b6754f6fc0feb" Oct 03 13:57:49 crc kubenswrapper[4861]: I1003 13:57:49.310958 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8a7daae141e28d5003431f26c03d5677a61e3f85ca1f8b62ee2b6754f6fc0feb"} err="failed to get container status \"8a7daae141e28d5003431f26c03d5677a61e3f85ca1f8b62ee2b6754f6fc0feb\": rpc error: code = NotFound desc = could not find container \"8a7daae141e28d5003431f26c03d5677a61e3f85ca1f8b62ee2b6754f6fc0feb\": container with ID starting with 8a7daae141e28d5003431f26c03d5677a61e3f85ca1f8b62ee2b6754f6fc0feb not found: ID does not exist" Oct 03 13:57:49 crc kubenswrapper[4861]: I1003 13:57:49.310979 4861 scope.go:117] "RemoveContainer" containerID="1f80baccbaf4f0fe9ba0eb0ad1ffe665c93a497fb2cd4043a657017435fa8931" Oct 03 13:57:49 crc kubenswrapper[4861]: E1003 13:57:49.311325 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1f80baccbaf4f0fe9ba0eb0ad1ffe665c93a497fb2cd4043a657017435fa8931\": container with ID starting with 1f80baccbaf4f0fe9ba0eb0ad1ffe665c93a497fb2cd4043a657017435fa8931 not found: ID does not exist" containerID="1f80baccbaf4f0fe9ba0eb0ad1ffe665c93a497fb2cd4043a657017435fa8931" Oct 03 13:57:49 crc kubenswrapper[4861]: I1003 13:57:49.311385 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1f80baccbaf4f0fe9ba0eb0ad1ffe665c93a497fb2cd4043a657017435fa8931"} err="failed to get container status \"1f80baccbaf4f0fe9ba0eb0ad1ffe665c93a497fb2cd4043a657017435fa8931\": rpc error: code = NotFound desc = could not find container \"1f80baccbaf4f0fe9ba0eb0ad1ffe665c93a497fb2cd4043a657017435fa8931\": container with ID starting with 1f80baccbaf4f0fe9ba0eb0ad1ffe665c93a497fb2cd4043a657017435fa8931 not found: ID does not exist" Oct 03 13:57:49 crc kubenswrapper[4861]: I1003 13:57:49.311436 4861 scope.go:117] "RemoveContainer" 
containerID="cc1254ec440cf83c06079b80a131c2653c4e9fe5298459d9529fd9b28542efe9" Oct 03 13:57:49 crc kubenswrapper[4861]: E1003 13:57:49.311749 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cc1254ec440cf83c06079b80a131c2653c4e9fe5298459d9529fd9b28542efe9\": container with ID starting with cc1254ec440cf83c06079b80a131c2653c4e9fe5298459d9529fd9b28542efe9 not found: ID does not exist" containerID="cc1254ec440cf83c06079b80a131c2653c4e9fe5298459d9529fd9b28542efe9" Oct 03 13:57:49 crc kubenswrapper[4861]: I1003 13:57:49.311776 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cc1254ec440cf83c06079b80a131c2653c4e9fe5298459d9529fd9b28542efe9"} err="failed to get container status \"cc1254ec440cf83c06079b80a131c2653c4e9fe5298459d9529fd9b28542efe9\": rpc error: code = NotFound desc = could not find container \"cc1254ec440cf83c06079b80a131c2653c4e9fe5298459d9529fd9b28542efe9\": container with ID starting with cc1254ec440cf83c06079b80a131c2653c4e9fe5298459d9529fd9b28542efe9 not found: ID does not exist" Oct 03 13:57:50 crc kubenswrapper[4861]: I1003 13:57:50.694319 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ed298cda-8891-48eb-af45-fd8746c8ab19" path="/var/lib/kubelet/pods/ed298cda-8891-48eb-af45-fd8746c8ab19/volumes" Oct 03 13:57:55 crc kubenswrapper[4861]: I1003 13:57:55.681774 4861 scope.go:117] "RemoveContainer" containerID="c1e256e5753a41524e35adb39d95dd19fa1e46e14c50fc9dfd4aef05090aab1c" Oct 03 13:57:55 crc kubenswrapper[4861]: E1003 13:57:55.682479 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 13:58:09 crc kubenswrapper[4861]: I1003 13:58:09.681618 4861 scope.go:117] "RemoveContainer" containerID="c1e256e5753a41524e35adb39d95dd19fa1e46e14c50fc9dfd4aef05090aab1c" Oct 03 13:58:09 crc kubenswrapper[4861]: E1003 13:58:09.682417 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 13:58:23 crc kubenswrapper[4861]: I1003 13:58:23.682432 4861 scope.go:117] "RemoveContainer" containerID="c1e256e5753a41524e35adb39d95dd19fa1e46e14c50fc9dfd4aef05090aab1c" Oct 03 13:58:23 crc kubenswrapper[4861]: E1003 13:58:23.683135 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 13:58:34 crc kubenswrapper[4861]: I1003 13:58:34.681952 4861 scope.go:117] "RemoveContainer" 
containerID="c1e256e5753a41524e35adb39d95dd19fa1e46e14c50fc9dfd4aef05090aab1c" Oct 03 13:58:34 crc kubenswrapper[4861]: E1003 13:58:34.683104 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 13:58:45 crc kubenswrapper[4861]: I1003 13:58:45.681907 4861 scope.go:117] "RemoveContainer" containerID="c1e256e5753a41524e35adb39d95dd19fa1e46e14c50fc9dfd4aef05090aab1c" Oct 03 13:58:45 crc kubenswrapper[4861]: E1003 13:58:45.682841 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 13:58:56 crc kubenswrapper[4861]: I1003 13:58:56.689783 4861 scope.go:117] "RemoveContainer" containerID="c1e256e5753a41524e35adb39d95dd19fa1e46e14c50fc9dfd4aef05090aab1c" Oct 03 13:58:56 crc kubenswrapper[4861]: E1003 13:58:56.690594 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 13:59:11 crc kubenswrapper[4861]: I1003 13:59:11.681585 4861 scope.go:117] "RemoveContainer" containerID="c1e256e5753a41524e35adb39d95dd19fa1e46e14c50fc9dfd4aef05090aab1c" Oct 03 13:59:11 crc kubenswrapper[4861]: E1003 13:59:11.682363 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 13:59:24 crc kubenswrapper[4861]: I1003 13:59:24.680552 4861 scope.go:117] "RemoveContainer" containerID="c1e256e5753a41524e35adb39d95dd19fa1e46e14c50fc9dfd4aef05090aab1c" Oct 03 13:59:24 crc kubenswrapper[4861]: E1003 13:59:24.681384 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 13:59:38 crc kubenswrapper[4861]: I1003 13:59:38.681385 4861 scope.go:117] "RemoveContainer" containerID="c1e256e5753a41524e35adb39d95dd19fa1e46e14c50fc9dfd4aef05090aab1c" Oct 03 13:59:38 crc kubenswrapper[4861]: E1003 13:59:38.682085 4861 pod_workers.go:1301] "Error syncing pod, 
skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 13:59:49 crc kubenswrapper[4861]: I1003 13:59:49.042833 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-6thsd"] Oct 03 13:59:49 crc kubenswrapper[4861]: I1003 13:59:49.052326 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-6thsd"] Oct 03 13:59:50 crc kubenswrapper[4861]: I1003 13:59:50.024308 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-6jwg6"] Oct 03 13:59:50 crc kubenswrapper[4861]: I1003 13:59:50.031068 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-clwv7"] Oct 03 13:59:50 crc kubenswrapper[4861]: I1003 13:59:50.037701 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-6jwg6"] Oct 03 13:59:50 crc kubenswrapper[4861]: I1003 13:59:50.045072 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-clwv7"] Oct 03 13:59:50 crc kubenswrapper[4861]: I1003 13:59:50.681390 4861 scope.go:117] "RemoveContainer" containerID="c1e256e5753a41524e35adb39d95dd19fa1e46e14c50fc9dfd4aef05090aab1c" Oct 03 13:59:50 crc kubenswrapper[4861]: E1003 13:59:50.682027 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 13:59:50 crc kubenswrapper[4861]: I1003 13:59:50.695462 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5bda74fa-3e6f-4a03-aa42-b3e1ea436936" path="/var/lib/kubelet/pods/5bda74fa-3e6f-4a03-aa42-b3e1ea436936/volumes" Oct 03 13:59:50 crc kubenswrapper[4861]: I1003 13:59:50.696810 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="88033eb7-5000-426e-9cae-8352354b0ca5" path="/var/lib/kubelet/pods/88033eb7-5000-426e-9cae-8352354b0ca5/volumes" Oct 03 13:59:50 crc kubenswrapper[4861]: I1003 13:59:50.697432 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8eae347e-a22b-43bb-8fdb-f7bfb1022fdc" path="/var/lib/kubelet/pods/8eae347e-a22b-43bb-8fdb-f7bfb1022fdc/volumes" Oct 03 13:59:56 crc kubenswrapper[4861]: I1003 13:59:56.620696 4861 scope.go:117] "RemoveContainer" containerID="35c92713f444d21249d42a2920ff2bb17ad6c59e839303ffee5dd8340318b7cb" Oct 03 13:59:56 crc kubenswrapper[4861]: I1003 13:59:56.647315 4861 scope.go:117] "RemoveContainer" containerID="5bed09adce703f23c3cdfdf5ee6ae74113e17a2b33d3d78950fa4125bc1fc24c" Oct 03 13:59:56 crc kubenswrapper[4861]: I1003 13:59:56.717243 4861 scope.go:117] "RemoveContainer" containerID="c2176b17df52661b4a9aaeae7773b800759bdd492e688f9d72ef348ce3a1eb5a" Oct 03 13:59:58 crc kubenswrapper[4861]: I1003 13:59:58.053872 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-lh2dn"] Oct 03 13:59:58 crc kubenswrapper[4861]: I1003 13:59:58.066133 4861 kubelet.go:2431] "SyncLoop 
REMOVE" source="api" pods=["openstack/cinder-db-create-lh2dn"] Oct 03 13:59:58 crc kubenswrapper[4861]: I1003 13:59:58.406965 4861 generic.go:334] "Generic (PLEG): container finished" podID="88cfdf25-6cf6-4553-a95b-d49e13d2f509" containerID="65c11677b2cc71ab0fc089da1ab632d375df879d98d3555ec572d8791edc2b74" exitCode=0 Oct 03 13:59:58 crc kubenswrapper[4861]: I1003 13:59:58.407009 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-8zh9n" event={"ID":"88cfdf25-6cf6-4553-a95b-d49e13d2f509","Type":"ContainerDied","Data":"65c11677b2cc71ab0fc089da1ab632d375df879d98d3555ec572d8791edc2b74"} Oct 03 13:59:58 crc kubenswrapper[4861]: I1003 13:59:58.696293 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f2641015-4563-45ac-bfa5-e19fcc791806" path="/var/lib/kubelet/pods/f2641015-4563-45ac-bfa5-e19fcc791806/volumes" Oct 03 13:59:59 crc kubenswrapper[4861]: I1003 13:59:59.047482 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-7jdcl"] Oct 03 13:59:59 crc kubenswrapper[4861]: I1003 13:59:59.060153 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-119e-account-create-jtptm"] Oct 03 13:59:59 crc kubenswrapper[4861]: I1003 13:59:59.074274 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-7jdcl"] Oct 03 13:59:59 crc kubenswrapper[4861]: I1003 13:59:59.082745 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-119e-account-create-jtptm"] Oct 03 13:59:59 crc kubenswrapper[4861]: I1003 13:59:59.814798 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-8zh9n" Oct 03 13:59:59 crc kubenswrapper[4861]: I1003 13:59:59.976637 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/88cfdf25-6cf6-4553-a95b-d49e13d2f509-bootstrap-combined-ca-bundle\") pod \"88cfdf25-6cf6-4553-a95b-d49e13d2f509\" (UID: \"88cfdf25-6cf6-4553-a95b-d49e13d2f509\") " Oct 03 13:59:59 crc kubenswrapper[4861]: I1003 13:59:59.976692 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/88cfdf25-6cf6-4553-a95b-d49e13d2f509-ssh-key\") pod \"88cfdf25-6cf6-4553-a95b-d49e13d2f509\" (UID: \"88cfdf25-6cf6-4553-a95b-d49e13d2f509\") " Oct 03 13:59:59 crc kubenswrapper[4861]: I1003 13:59:59.976785 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/88cfdf25-6cf6-4553-a95b-d49e13d2f509-inventory\") pod \"88cfdf25-6cf6-4553-a95b-d49e13d2f509\" (UID: \"88cfdf25-6cf6-4553-a95b-d49e13d2f509\") " Oct 03 13:59:59 crc kubenswrapper[4861]: I1003 13:59:59.976817 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qrgvd\" (UniqueName: \"kubernetes.io/projected/88cfdf25-6cf6-4553-a95b-d49e13d2f509-kube-api-access-qrgvd\") pod \"88cfdf25-6cf6-4553-a95b-d49e13d2f509\" (UID: \"88cfdf25-6cf6-4553-a95b-d49e13d2f509\") " Oct 03 13:59:59 crc kubenswrapper[4861]: I1003 13:59:59.984427 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/88cfdf25-6cf6-4553-a95b-d49e13d2f509-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "88cfdf25-6cf6-4553-a95b-d49e13d2f509" (UID: 
"88cfdf25-6cf6-4553-a95b-d49e13d2f509"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:59:59 crc kubenswrapper[4861]: I1003 13:59:59.985469 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/88cfdf25-6cf6-4553-a95b-d49e13d2f509-kube-api-access-qrgvd" (OuterVolumeSpecName: "kube-api-access-qrgvd") pod "88cfdf25-6cf6-4553-a95b-d49e13d2f509" (UID: "88cfdf25-6cf6-4553-a95b-d49e13d2f509"). InnerVolumeSpecName "kube-api-access-qrgvd". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 14:00:00 crc kubenswrapper[4861]: I1003 14:00:00.016486 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/88cfdf25-6cf6-4553-a95b-d49e13d2f509-inventory" (OuterVolumeSpecName: "inventory") pod "88cfdf25-6cf6-4553-a95b-d49e13d2f509" (UID: "88cfdf25-6cf6-4553-a95b-d49e13d2f509"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 14:00:00 crc kubenswrapper[4861]: I1003 14:00:00.028443 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/88cfdf25-6cf6-4553-a95b-d49e13d2f509-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "88cfdf25-6cf6-4553-a95b-d49e13d2f509" (UID: "88cfdf25-6cf6-4553-a95b-d49e13d2f509"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 14:00:00 crc kubenswrapper[4861]: I1003 14:00:00.041165 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-8a29-account-create-qtclr"] Oct 03 14:00:00 crc kubenswrapper[4861]: I1003 14:00:00.065758 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-8a29-account-create-qtclr"] Oct 03 14:00:00 crc kubenswrapper[4861]: I1003 14:00:00.074023 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-4gn45"] Oct 03 14:00:00 crc kubenswrapper[4861]: I1003 14:00:00.079539 4861 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/88cfdf25-6cf6-4553-a95b-d49e13d2f509-inventory\") on node \"crc\" DevicePath \"\"" Oct 03 14:00:00 crc kubenswrapper[4861]: I1003 14:00:00.079747 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qrgvd\" (UniqueName: \"kubernetes.io/projected/88cfdf25-6cf6-4553-a95b-d49e13d2f509-kube-api-access-qrgvd\") on node \"crc\" DevicePath \"\"" Oct 03 14:00:00 crc kubenswrapper[4861]: I1003 14:00:00.079841 4861 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/88cfdf25-6cf6-4553-a95b-d49e13d2f509-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 14:00:00 crc kubenswrapper[4861]: I1003 14:00:00.079915 4861 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/88cfdf25-6cf6-4553-a95b-d49e13d2f509-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 03 14:00:00 crc kubenswrapper[4861]: I1003 14:00:00.085792 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-4gn45"] Oct 03 14:00:00 crc kubenswrapper[4861]: I1003 14:00:00.145701 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29325000-mqdpc"] Oct 03 14:00:00 crc kubenswrapper[4861]: E1003 14:00:00.146158 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed298cda-8891-48eb-af45-fd8746c8ab19" 
containerName="extract-utilities" Oct 03 14:00:00 crc kubenswrapper[4861]: I1003 14:00:00.146181 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed298cda-8891-48eb-af45-fd8746c8ab19" containerName="extract-utilities" Oct 03 14:00:00 crc kubenswrapper[4861]: E1003 14:00:00.146197 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed298cda-8891-48eb-af45-fd8746c8ab19" containerName="extract-content" Oct 03 14:00:00 crc kubenswrapper[4861]: I1003 14:00:00.146205 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed298cda-8891-48eb-af45-fd8746c8ab19" containerName="extract-content" Oct 03 14:00:00 crc kubenswrapper[4861]: E1003 14:00:00.146224 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="88cfdf25-6cf6-4553-a95b-d49e13d2f509" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Oct 03 14:00:00 crc kubenswrapper[4861]: I1003 14:00:00.146431 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="88cfdf25-6cf6-4553-a95b-d49e13d2f509" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Oct 03 14:00:00 crc kubenswrapper[4861]: E1003 14:00:00.146451 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed298cda-8891-48eb-af45-fd8746c8ab19" containerName="registry-server" Oct 03 14:00:00 crc kubenswrapper[4861]: I1003 14:00:00.146460 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed298cda-8891-48eb-af45-fd8746c8ab19" containerName="registry-server" Oct 03 14:00:00 crc kubenswrapper[4861]: I1003 14:00:00.146677 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="88cfdf25-6cf6-4553-a95b-d49e13d2f509" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Oct 03 14:00:00 crc kubenswrapper[4861]: I1003 14:00:00.146695 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="ed298cda-8891-48eb-af45-fd8746c8ab19" containerName="registry-server" Oct 03 14:00:00 crc kubenswrapper[4861]: I1003 14:00:00.147356 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29325000-mqdpc" Oct 03 14:00:00 crc kubenswrapper[4861]: I1003 14:00:00.149704 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 03 14:00:00 crc kubenswrapper[4861]: I1003 14:00:00.151180 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 03 14:00:00 crc kubenswrapper[4861]: I1003 14:00:00.158423 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29325000-mqdpc"] Oct 03 14:00:00 crc kubenswrapper[4861]: I1003 14:00:00.323409 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/be4ceb81-4a5e-4c11-995d-9e224ff2acb7-secret-volume\") pod \"collect-profiles-29325000-mqdpc\" (UID: \"be4ceb81-4a5e-4c11-995d-9e224ff2acb7\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325000-mqdpc" Oct 03 14:00:00 crc kubenswrapper[4861]: I1003 14:00:00.323465 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cctfq\" (UniqueName: \"kubernetes.io/projected/be4ceb81-4a5e-4c11-995d-9e224ff2acb7-kube-api-access-cctfq\") pod \"collect-profiles-29325000-mqdpc\" (UID: \"be4ceb81-4a5e-4c11-995d-9e224ff2acb7\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325000-mqdpc" Oct 03 14:00:00 crc kubenswrapper[4861]: I1003 14:00:00.323667 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/be4ceb81-4a5e-4c11-995d-9e224ff2acb7-config-volume\") pod \"collect-profiles-29325000-mqdpc\" (UID: \"be4ceb81-4a5e-4c11-995d-9e224ff2acb7\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325000-mqdpc" Oct 03 14:00:00 crc kubenswrapper[4861]: I1003 14:00:00.424127 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-8zh9n" event={"ID":"88cfdf25-6cf6-4553-a95b-d49e13d2f509","Type":"ContainerDied","Data":"8e80c87361fe5d79c441a1673f5761d3669d412b13265d75d44801f0579bf8de"} Oct 03 14:00:00 crc kubenswrapper[4861]: I1003 14:00:00.424169 4861 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8e80c87361fe5d79c441a1673f5761d3669d412b13265d75d44801f0579bf8de" Oct 03 14:00:00 crc kubenswrapper[4861]: I1003 14:00:00.424199 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-8zh9n" Oct 03 14:00:00 crc kubenswrapper[4861]: I1003 14:00:00.424880 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/be4ceb81-4a5e-4c11-995d-9e224ff2acb7-config-volume\") pod \"collect-profiles-29325000-mqdpc\" (UID: \"be4ceb81-4a5e-4c11-995d-9e224ff2acb7\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325000-mqdpc" Oct 03 14:00:00 crc kubenswrapper[4861]: I1003 14:00:00.425019 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/be4ceb81-4a5e-4c11-995d-9e224ff2acb7-secret-volume\") pod \"collect-profiles-29325000-mqdpc\" (UID: \"be4ceb81-4a5e-4c11-995d-9e224ff2acb7\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325000-mqdpc" Oct 03 14:00:00 crc kubenswrapper[4861]: I1003 14:00:00.425047 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cctfq\" (UniqueName: \"kubernetes.io/projected/be4ceb81-4a5e-4c11-995d-9e224ff2acb7-kube-api-access-cctfq\") pod \"collect-profiles-29325000-mqdpc\" (UID: \"be4ceb81-4a5e-4c11-995d-9e224ff2acb7\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325000-mqdpc" Oct 03 14:00:00 crc kubenswrapper[4861]: I1003 14:00:00.425909 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/be4ceb81-4a5e-4c11-995d-9e224ff2acb7-config-volume\") pod \"collect-profiles-29325000-mqdpc\" (UID: \"be4ceb81-4a5e-4c11-995d-9e224ff2acb7\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325000-mqdpc" Oct 03 14:00:00 crc kubenswrapper[4861]: I1003 14:00:00.430606 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/be4ceb81-4a5e-4c11-995d-9e224ff2acb7-secret-volume\") pod \"collect-profiles-29325000-mqdpc\" (UID: \"be4ceb81-4a5e-4c11-995d-9e224ff2acb7\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325000-mqdpc" Oct 03 14:00:00 crc kubenswrapper[4861]: I1003 14:00:00.444710 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cctfq\" (UniqueName: \"kubernetes.io/projected/be4ceb81-4a5e-4c11-995d-9e224ff2acb7-kube-api-access-cctfq\") pod \"collect-profiles-29325000-mqdpc\" (UID: \"be4ceb81-4a5e-4c11-995d-9e224ff2acb7\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325000-mqdpc" Oct 03 14:00:00 crc kubenswrapper[4861]: I1003 14:00:00.509867 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-8tkzn"] Oct 03 14:00:00 crc kubenswrapper[4861]: I1003 14:00:00.510926 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-8tkzn" Oct 03 14:00:00 crc kubenswrapper[4861]: I1003 14:00:00.513324 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 03 14:00:00 crc kubenswrapper[4861]: I1003 14:00:00.513932 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 03 14:00:00 crc kubenswrapper[4861]: I1003 14:00:00.514168 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 03 14:00:00 crc kubenswrapper[4861]: I1003 14:00:00.514604 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-bd7xg" Oct 03 14:00:00 crc kubenswrapper[4861]: I1003 14:00:00.531005 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e5903a2f-8943-4fab-8ddf-6ec1b8329590-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-8tkzn\" (UID: \"e5903a2f-8943-4fab-8ddf-6ec1b8329590\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-8tkzn" Oct 03 14:00:00 crc kubenswrapper[4861]: I1003 14:00:00.531070 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7ghmp\" (UniqueName: \"kubernetes.io/projected/e5903a2f-8943-4fab-8ddf-6ec1b8329590-kube-api-access-7ghmp\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-8tkzn\" (UID: \"e5903a2f-8943-4fab-8ddf-6ec1b8329590\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-8tkzn" Oct 03 14:00:00 crc kubenswrapper[4861]: I1003 14:00:00.531197 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e5903a2f-8943-4fab-8ddf-6ec1b8329590-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-8tkzn\" (UID: \"e5903a2f-8943-4fab-8ddf-6ec1b8329590\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-8tkzn" Oct 03 14:00:00 crc kubenswrapper[4861]: I1003 14:00:00.533367 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29325000-mqdpc" Oct 03 14:00:00 crc kubenswrapper[4861]: I1003 14:00:00.585133 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-8tkzn"] Oct 03 14:00:00 crc kubenswrapper[4861]: I1003 14:00:00.632314 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e5903a2f-8943-4fab-8ddf-6ec1b8329590-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-8tkzn\" (UID: \"e5903a2f-8943-4fab-8ddf-6ec1b8329590\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-8tkzn" Oct 03 14:00:00 crc kubenswrapper[4861]: I1003 14:00:00.632384 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7ghmp\" (UniqueName: \"kubernetes.io/projected/e5903a2f-8943-4fab-8ddf-6ec1b8329590-kube-api-access-7ghmp\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-8tkzn\" (UID: \"e5903a2f-8943-4fab-8ddf-6ec1b8329590\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-8tkzn" Oct 03 14:00:00 crc kubenswrapper[4861]: I1003 14:00:00.632459 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e5903a2f-8943-4fab-8ddf-6ec1b8329590-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-8tkzn\" (UID: \"e5903a2f-8943-4fab-8ddf-6ec1b8329590\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-8tkzn" Oct 03 14:00:00 crc kubenswrapper[4861]: I1003 14:00:00.646122 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e5903a2f-8943-4fab-8ddf-6ec1b8329590-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-8tkzn\" (UID: \"e5903a2f-8943-4fab-8ddf-6ec1b8329590\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-8tkzn" Oct 03 14:00:00 crc kubenswrapper[4861]: I1003 14:00:00.650414 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e5903a2f-8943-4fab-8ddf-6ec1b8329590-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-8tkzn\" (UID: \"e5903a2f-8943-4fab-8ddf-6ec1b8329590\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-8tkzn" Oct 03 14:00:00 crc kubenswrapper[4861]: I1003 14:00:00.662158 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7ghmp\" (UniqueName: \"kubernetes.io/projected/e5903a2f-8943-4fab-8ddf-6ec1b8329590-kube-api-access-7ghmp\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-8tkzn\" (UID: \"e5903a2f-8943-4fab-8ddf-6ec1b8329590\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-8tkzn" Oct 03 14:00:00 crc kubenswrapper[4861]: I1003 14:00:00.702792 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1a715a54-ad54-4756-aa5c-32ac4cc2e1f4" path="/var/lib/kubelet/pods/1a715a54-ad54-4756-aa5c-32ac4cc2e1f4/volumes" Oct 03 14:00:00 crc kubenswrapper[4861]: I1003 14:00:00.705317 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6f60eab9-bc53-4007-bc7e-261845584dae" path="/var/lib/kubelet/pods/6f60eab9-bc53-4007-bc7e-261845584dae/volumes" Oct 03 14:00:00 crc kubenswrapper[4861]: I1003 14:00:00.706094 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="900785d8-c3f4-4ba6-a925-56165ba86a6d" path="/var/lib/kubelet/pods/900785d8-c3f4-4ba6-a925-56165ba86a6d/volumes" Oct 03 14:00:00 crc kubenswrapper[4861]: I1003 14:00:00.706743 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a769cb74-01d8-4a40-8a92-4cc15f1d418a" path="/var/lib/kubelet/pods/a769cb74-01d8-4a40-8a92-4cc15f1d418a/volumes" Oct 03 14:00:00 crc kubenswrapper[4861]: I1003 14:00:00.831831 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-8tkzn" Oct 03 14:00:01 crc kubenswrapper[4861]: I1003 14:00:01.069626 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-a1af-account-create-r7n7q"] Oct 03 14:00:01 crc kubenswrapper[4861]: I1003 14:00:01.102975 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-a1af-account-create-r7n7q"] Oct 03 14:00:01 crc kubenswrapper[4861]: I1003 14:00:01.116905 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29325000-mqdpc"] Oct 03 14:00:01 crc kubenswrapper[4861]: I1003 14:00:01.436349 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29325000-mqdpc" event={"ID":"be4ceb81-4a5e-4c11-995d-9e224ff2acb7","Type":"ContainerStarted","Data":"18c220d513d2679ed116fc1a5b5f1f5f69d9c989bbd763a8fd3a2e5e546d0ab5"} Oct 03 14:00:01 crc kubenswrapper[4861]: I1003 14:00:01.436660 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29325000-mqdpc" event={"ID":"be4ceb81-4a5e-4c11-995d-9e224ff2acb7","Type":"ContainerStarted","Data":"c2d0ddba83dc1aec7c757b21d52f47b082b5aac2bf0656e8b206bd4803f90d8b"} Oct 03 14:00:01 crc kubenswrapper[4861]: I1003 14:00:01.467200 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29325000-mqdpc" podStartSLOduration=1.467178471 podStartE2EDuration="1.467178471s" podCreationTimestamp="2025-10-03 14:00:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 14:00:01.464369056 +0000 UTC m=+1715.462354123" watchObservedRunningTime="2025-10-03 14:00:01.467178471 +0000 UTC m=+1715.465163518" Oct 03 14:00:01 crc kubenswrapper[4861]: I1003 14:00:01.654706 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-8tkzn"] Oct 03 14:00:01 crc kubenswrapper[4861]: W1003 14:00:01.664831 4861 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode5903a2f_8943_4fab_8ddf_6ec1b8329590.slice/crio-14624d72fb32632780d7d3b80344ac32a491bd046fe74f10875df835d1f38d5f WatchSource:0}: Error finding container 14624d72fb32632780d7d3b80344ac32a491bd046fe74f10875df835d1f38d5f: Status 404 returned error can't find the container with id 14624d72fb32632780d7d3b80344ac32a491bd046fe74f10875df835d1f38d5f Oct 03 14:00:01 crc kubenswrapper[4861]: I1003 14:00:01.667186 4861 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 03 14:00:02 crc kubenswrapper[4861]: I1003 14:00:02.447242 4861 generic.go:334] "Generic (PLEG): container finished" podID="be4ceb81-4a5e-4c11-995d-9e224ff2acb7" containerID="18c220d513d2679ed116fc1a5b5f1f5f69d9c989bbd763a8fd3a2e5e546d0ab5" exitCode=0 
Oct 03 14:00:02 crc kubenswrapper[4861]: I1003 14:00:02.447298 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29325000-mqdpc" event={"ID":"be4ceb81-4a5e-4c11-995d-9e224ff2acb7","Type":"ContainerDied","Data":"18c220d513d2679ed116fc1a5b5f1f5f69d9c989bbd763a8fd3a2e5e546d0ab5"}
Oct 03 14:00:02 crc kubenswrapper[4861]: I1003 14:00:02.450824 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-8tkzn" event={"ID":"e5903a2f-8943-4fab-8ddf-6ec1b8329590","Type":"ContainerStarted","Data":"c5a9934ce4c5278e2dac6a0c11d0f3a009b91926cf92851bb16139b939dbd452"}
Oct 03 14:00:02 crc kubenswrapper[4861]: I1003 14:00:02.450867 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-8tkzn" event={"ID":"e5903a2f-8943-4fab-8ddf-6ec1b8329590","Type":"ContainerStarted","Data":"14624d72fb32632780d7d3b80344ac32a491bd046fe74f10875df835d1f38d5f"}
Oct 03 14:00:02 crc kubenswrapper[4861]: I1003 14:00:02.486193 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-8tkzn" podStartSLOduration=2.285596543 podStartE2EDuration="2.48617496s" podCreationTimestamp="2025-10-03 14:00:00 +0000 UTC" firstStartedPulling="2025-10-03 14:00:01.666930284 +0000 UTC m=+1715.664915331" lastFinishedPulling="2025-10-03 14:00:01.867508701 +0000 UTC m=+1715.865493748" observedRunningTime="2025-10-03 14:00:02.484887605 +0000 UTC m=+1716.482872672" watchObservedRunningTime="2025-10-03 14:00:02.48617496 +0000 UTC m=+1716.484160007"
Oct 03 14:00:02 crc kubenswrapper[4861]: I1003 14:00:02.690183 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4be391e0-99d9-4191-95f7-416e9e893b30" path="/var/lib/kubelet/pods/4be391e0-99d9-4191-95f7-416e9e893b30/volumes"
Oct 03 14:00:03 crc kubenswrapper[4861]: I1003 14:00:03.681632 4861 scope.go:117] "RemoveContainer" containerID="c1e256e5753a41524e35adb39d95dd19fa1e46e14c50fc9dfd4aef05090aab1c"
Oct 03 14:00:03 crc kubenswrapper[4861]: E1003 14:00:03.682270 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348"
Oct 03 14:00:03 crc kubenswrapper[4861]: I1003 14:00:03.819514 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29325000-mqdpc"
Oct 03 14:00:03 crc kubenswrapper[4861]: I1003 14:00:03.997731 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/be4ceb81-4a5e-4c11-995d-9e224ff2acb7-config-volume\") pod \"be4ceb81-4a5e-4c11-995d-9e224ff2acb7\" (UID: \"be4ceb81-4a5e-4c11-995d-9e224ff2acb7\") "
Oct 03 14:00:03 crc kubenswrapper[4861]: I1003 14:00:03.997822 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/be4ceb81-4a5e-4c11-995d-9e224ff2acb7-secret-volume\") pod \"be4ceb81-4a5e-4c11-995d-9e224ff2acb7\" (UID: \"be4ceb81-4a5e-4c11-995d-9e224ff2acb7\") "
Oct 03 14:00:03 crc kubenswrapper[4861]: I1003 14:00:03.997850 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cctfq\" (UniqueName: \"kubernetes.io/projected/be4ceb81-4a5e-4c11-995d-9e224ff2acb7-kube-api-access-cctfq\") pod \"be4ceb81-4a5e-4c11-995d-9e224ff2acb7\" (UID: \"be4ceb81-4a5e-4c11-995d-9e224ff2acb7\") "
Oct 03 14:00:03 crc kubenswrapper[4861]: I1003 14:00:03.998596 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/be4ceb81-4a5e-4c11-995d-9e224ff2acb7-config-volume" (OuterVolumeSpecName: "config-volume") pod "be4ceb81-4a5e-4c11-995d-9e224ff2acb7" (UID: "be4ceb81-4a5e-4c11-995d-9e224ff2acb7"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 14:00:04 crc kubenswrapper[4861]: I1003 14:00:04.003035 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/be4ceb81-4a5e-4c11-995d-9e224ff2acb7-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "be4ceb81-4a5e-4c11-995d-9e224ff2acb7" (UID: "be4ceb81-4a5e-4c11-995d-9e224ff2acb7"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 14:00:04 crc kubenswrapper[4861]: I1003 14:00:04.004731 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/be4ceb81-4a5e-4c11-995d-9e224ff2acb7-kube-api-access-cctfq" (OuterVolumeSpecName: "kube-api-access-cctfq") pod "be4ceb81-4a5e-4c11-995d-9e224ff2acb7" (UID: "be4ceb81-4a5e-4c11-995d-9e224ff2acb7"). InnerVolumeSpecName "kube-api-access-cctfq". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 14:00:04 crc kubenswrapper[4861]: I1003 14:00:04.100136 4861 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/be4ceb81-4a5e-4c11-995d-9e224ff2acb7-config-volume\") on node \"crc\" DevicePath \"\""
Oct 03 14:00:04 crc kubenswrapper[4861]: I1003 14:00:04.100955 4861 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/be4ceb81-4a5e-4c11-995d-9e224ff2acb7-secret-volume\") on node \"crc\" DevicePath \"\""
Oct 03 14:00:04 crc kubenswrapper[4861]: I1003 14:00:04.100974 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cctfq\" (UniqueName: \"kubernetes.io/projected/be4ceb81-4a5e-4c11-995d-9e224ff2acb7-kube-api-access-cctfq\") on node \"crc\" DevicePath \"\""
Oct 03 14:00:04 crc kubenswrapper[4861]: I1003 14:00:04.483359 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29325000-mqdpc" event={"ID":"be4ceb81-4a5e-4c11-995d-9e224ff2acb7","Type":"ContainerDied","Data":"c2d0ddba83dc1aec7c757b21d52f47b082b5aac2bf0656e8b206bd4803f90d8b"}
Oct 03 14:00:04 crc kubenswrapper[4861]: I1003 14:00:04.483392 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29325000-mqdpc"
Oct 03 14:00:04 crc kubenswrapper[4861]: I1003 14:00:04.483432 4861 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c2d0ddba83dc1aec7c757b21d52f47b082b5aac2bf0656e8b206bd4803f90d8b"
Oct 03 14:00:16 crc kubenswrapper[4861]: I1003 14:00:16.036019 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-dc57-account-create-47dgl"]
Oct 03 14:00:16 crc kubenswrapper[4861]: I1003 14:00:16.047752 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-dc57-account-create-47dgl"]
Oct 03 14:00:16 crc kubenswrapper[4861]: I1003 14:00:16.688508 4861 scope.go:117] "RemoveContainer" containerID="c1e256e5753a41524e35adb39d95dd19fa1e46e14c50fc9dfd4aef05090aab1c"
Oct 03 14:00:16 crc kubenswrapper[4861]: E1003 14:00:16.688854 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348"
Oct 03 14:00:16 crc kubenswrapper[4861]: I1003 14:00:16.694710 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aeebffc3-09a2-4d1b-95e5-abf4ed19fe26" path="/var/lib/kubelet/pods/aeebffc3-09a2-4d1b-95e5-abf4ed19fe26/volumes"
Oct 03 14:00:18 crc kubenswrapper[4861]: I1003 14:00:18.044766 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-gnswn"]
Oct 03 14:00:18 crc kubenswrapper[4861]: I1003 14:00:18.068039 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-gnswn"]
Oct 03 14:00:18 crc kubenswrapper[4861]: I1003 14:00:18.692956 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9041b332-7c62-48b3-827e-e74f29984d35" path="/var/lib/kubelet/pods/9041b332-7c62-48b3-827e-e74f29984d35/volumes"
Oct 03 14:00:19 crc kubenswrapper[4861]: I1003 14:00:19.033866 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-6c39-account-create-zbrwd"]
Oct 03 14:00:19 crc kubenswrapper[4861]: I1003 14:00:19.049079 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-6c39-account-create-zbrwd"]
Oct 03 14:00:19 crc kubenswrapper[4861]: I1003 14:00:19.058487 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-8da1-account-create-wkd4r"]
Oct 03 14:00:19 crc kubenswrapper[4861]: I1003 14:00:19.066440 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-8da1-account-create-wkd4r"]
Oct 03 14:00:20 crc kubenswrapper[4861]: I1003 14:00:20.698101 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7129a35f-e546-430e-9cf5-e18ad1e429a4" path="/var/lib/kubelet/pods/7129a35f-e546-430e-9cf5-e18ad1e429a4/volumes"
Oct 03 14:00:20 crc kubenswrapper[4861]: I1003 14:00:20.699379 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6abe193-a49f-43f6-ada9-814ebc997a0f" path="/var/lib/kubelet/pods/b6abe193-a49f-43f6-ada9-814ebc997a0f/volumes"
Oct 03 14:00:21 crc kubenswrapper[4861]: I1003 14:00:21.169477 4861 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack-operators/mariadb-operator-controller-manager-5c468bf4d4-rx55k" podUID="9bf321cf-e938-46ed-b8b9-01418f85de45" containerName="manager" probeResult="failure" output="Get \"http://10.217.0.81:8081/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Oct 03 14:00:31 crc kubenswrapper[4861]: I1003 14:00:31.681911 4861 scope.go:117] "RemoveContainer" containerID="c1e256e5753a41524e35adb39d95dd19fa1e46e14c50fc9dfd4aef05090aab1c"
Oct 03 14:00:31 crc kubenswrapper[4861]: E1003 14:00:31.682825 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348"
Oct 03 14:00:43 crc kubenswrapper[4861]: I1003 14:00:43.027917 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-fb9qp"]
Oct 03 14:00:43 crc kubenswrapper[4861]: I1003 14:00:43.035995 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-fb9qp"]
Oct 03 14:00:43 crc kubenswrapper[4861]: I1003 14:00:43.680935 4861 scope.go:117] "RemoveContainer" containerID="c1e256e5753a41524e35adb39d95dd19fa1e46e14c50fc9dfd4aef05090aab1c"
Oct 03 14:00:43 crc kubenswrapper[4861]: E1003 14:00:43.681193 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348"
Oct 03 14:00:44 crc kubenswrapper[4861]: I1003 14:00:44.690635 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="caf4725e-be55-4527-b7b6-3be4e6e1999d" path="/var/lib/kubelet/pods/caf4725e-be55-4527-b7b6-3be4e6e1999d/volumes"
Oct 03 14:00:56 crc kubenswrapper[4861]: I1003 14:00:56.857846 4861 scope.go:117] "RemoveContainer" containerID="00b354772bbc010bd66cf6d4c53c4595fe46beb7ceebef3b61b37fb353fb6846"
Oct 03 14:00:56 crc kubenswrapper[4861]: I1003 14:00:56.891803 4861 scope.go:117] "RemoveContainer" containerID="546852f78f46e7728d2b4ef1e06b55e7af4dee473bce7508ecc536170265dbb9"
Oct 03 14:00:56 crc kubenswrapper[4861]: I1003 14:00:56.938808 4861 scope.go:117] "RemoveContainer" containerID="da89576fc34a0d2e3706d450553ac9dc9910cd7ce7c0dfee8451c5d7f3b9037d"
Oct 03 14:00:56 crc kubenswrapper[4861]: I1003 14:00:56.998525 4861 scope.go:117] "RemoveContainer" containerID="2554a61636729dafd3c60fca77e8f66f6052cbb6c412d3c2306e97fdf30ef074"
Oct 03 14:00:57 crc kubenswrapper[4861]: I1003 14:00:57.034961 4861 scope.go:117] "RemoveContainer" containerID="b274a087c1406dd905bded959a2774284b72781b5a9b6e4dc14bd8aa513e5d38"
Oct 03 14:00:57 crc kubenswrapper[4861]: I1003 14:00:57.089797 4861 scope.go:117] "RemoveContainer" containerID="1bdb71380a95c723d0a3c150144ef73bc08b24c8c4449ff46f0fd3706ce9c08f"
Oct 03 14:00:57 crc kubenswrapper[4861]: I1003 14:00:57.122647 4861 scope.go:117] "RemoveContainer" containerID="5e728610a658b78264dfefb74148a39a877d4c44dedf122ee62143cfe39e8b7d"
Oct 03 14:00:57 crc kubenswrapper[4861]: I1003 14:00:57.143140 4861 scope.go:117] "RemoveContainer" containerID="f8d2efc99fc79879962fc1bd0c0c703a2f8c52aa7de6832c75cb14b66b64ff12"
Oct 03 14:00:57 crc kubenswrapper[4861]: I1003 14:00:57.160553 4861 scope.go:117] "RemoveContainer" containerID="ca883022040a30e299b696339077f40674b6a8363b0c76c2fd8f8e7350acb223"
Oct 03 14:00:57 crc kubenswrapper[4861]: I1003 14:00:57.185845 4861 scope.go:117] "RemoveContainer" containerID="ba4b4f89c07fc32412e3361f1e92e19badc5f598fbc66c3466eb6a941467435c"
Oct 03 14:00:57 crc kubenswrapper[4861]: I1003 14:00:57.203807 4861 scope.go:117] "RemoveContainer" containerID="d52e054065d56b47da01e205478c11cd4629fde413f4de1ac80393ae839fa96d"
Oct 03 14:00:58 crc kubenswrapper[4861]: I1003 14:00:58.681282 4861 scope.go:117] "RemoveContainer" containerID="c1e256e5753a41524e35adb39d95dd19fa1e46e14c50fc9dfd4aef05090aab1c"
Oct 03 14:00:58 crc kubenswrapper[4861]: E1003 14:00:58.681851 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348"
Oct 03 14:00:59 crc kubenswrapper[4861]: I1003 14:00:59.047371 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-cgf9r"]
Oct 03 14:00:59 crc kubenswrapper[4861]: I1003 14:00:59.054741 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-cgf9r"]
Oct 03 14:01:00 crc kubenswrapper[4861]: I1003 14:01:00.149972 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-cron-29325001-vhzr2"]
Oct 03 14:01:00 crc kubenswrapper[4861]: E1003 14:01:00.150718 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="be4ceb81-4a5e-4c11-995d-9e224ff2acb7" containerName="collect-profiles"
Oct 03 14:01:00 crc kubenswrapper[4861]: I1003 14:01:00.150733 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="be4ceb81-4a5e-4c11-995d-9e224ff2acb7" containerName="collect-profiles"
Oct 03 14:01:00 crc kubenswrapper[4861]: I1003 14:01:00.150905 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="be4ceb81-4a5e-4c11-995d-9e224ff2acb7" containerName="collect-profiles"
Oct 03 14:01:00 crc kubenswrapper[4861]: I1003 14:01:00.151616 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29325001-vhzr2"
Oct 03 14:01:00 crc kubenswrapper[4861]: I1003 14:01:00.160959 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29325001-vhzr2"]
Oct 03 14:01:00 crc kubenswrapper[4861]: I1003 14:01:00.298683 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eb217f97-9f13-4711-b3ab-f449bdc34bae-combined-ca-bundle\") pod \"keystone-cron-29325001-vhzr2\" (UID: \"eb217f97-9f13-4711-b3ab-f449bdc34bae\") " pod="openstack/keystone-cron-29325001-vhzr2"
Oct 03 14:01:00 crc kubenswrapper[4861]: I1003 14:01:00.298771 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/eb217f97-9f13-4711-b3ab-f449bdc34bae-fernet-keys\") pod \"keystone-cron-29325001-vhzr2\" (UID: \"eb217f97-9f13-4711-b3ab-f449bdc34bae\") " pod="openstack/keystone-cron-29325001-vhzr2"
Oct 03 14:01:00 crc kubenswrapper[4861]: I1003 14:01:00.298795 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6h6kh\" (UniqueName: \"kubernetes.io/projected/eb217f97-9f13-4711-b3ab-f449bdc34bae-kube-api-access-6h6kh\") pod \"keystone-cron-29325001-vhzr2\" (UID: \"eb217f97-9f13-4711-b3ab-f449bdc34bae\") " pod="openstack/keystone-cron-29325001-vhzr2"
Oct 03 14:01:00 crc kubenswrapper[4861]: I1003 14:01:00.298812 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eb217f97-9f13-4711-b3ab-f449bdc34bae-config-data\") pod \"keystone-cron-29325001-vhzr2\" (UID: \"eb217f97-9f13-4711-b3ab-f449bdc34bae\") " pod="openstack/keystone-cron-29325001-vhzr2"
Oct 03 14:01:00 crc kubenswrapper[4861]: I1003 14:01:00.400658 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/eb217f97-9f13-4711-b3ab-f449bdc34bae-fernet-keys\") pod \"keystone-cron-29325001-vhzr2\" (UID: \"eb217f97-9f13-4711-b3ab-f449bdc34bae\") " pod="openstack/keystone-cron-29325001-vhzr2"
Oct 03 14:01:00 crc kubenswrapper[4861]: I1003 14:01:00.401015 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6h6kh\" (UniqueName: \"kubernetes.io/projected/eb217f97-9f13-4711-b3ab-f449bdc34bae-kube-api-access-6h6kh\") pod \"keystone-cron-29325001-vhzr2\" (UID: \"eb217f97-9f13-4711-b3ab-f449bdc34bae\") " pod="openstack/keystone-cron-29325001-vhzr2"
Oct 03 14:01:00 crc kubenswrapper[4861]: I1003 14:01:00.401313 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eb217f97-9f13-4711-b3ab-f449bdc34bae-config-data\") pod \"keystone-cron-29325001-vhzr2\" (UID: \"eb217f97-9f13-4711-b3ab-f449bdc34bae\") " pod="openstack/keystone-cron-29325001-vhzr2"
Oct 03 14:01:00 crc kubenswrapper[4861]: I1003 14:01:00.401762 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eb217f97-9f13-4711-b3ab-f449bdc34bae-combined-ca-bundle\") pod \"keystone-cron-29325001-vhzr2\" (UID: \"eb217f97-9f13-4711-b3ab-f449bdc34bae\") " pod="openstack/keystone-cron-29325001-vhzr2"
Oct 03 14:01:00 crc kubenswrapper[4861]: I1003 14:01:00.406659 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eb217f97-9f13-4711-b3ab-f449bdc34bae-combined-ca-bundle\") pod \"keystone-cron-29325001-vhzr2\" (UID: \"eb217f97-9f13-4711-b3ab-f449bdc34bae\") " pod="openstack/keystone-cron-29325001-vhzr2"
Oct 03 14:01:00 crc kubenswrapper[4861]: I1003 14:01:00.406732 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/eb217f97-9f13-4711-b3ab-f449bdc34bae-fernet-keys\") pod \"keystone-cron-29325001-vhzr2\" (UID: \"eb217f97-9f13-4711-b3ab-f449bdc34bae\") " pod="openstack/keystone-cron-29325001-vhzr2"
Oct 03 14:01:00 crc kubenswrapper[4861]: I1003 14:01:00.410281 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eb217f97-9f13-4711-b3ab-f449bdc34bae-config-data\") pod \"keystone-cron-29325001-vhzr2\" (UID: \"eb217f97-9f13-4711-b3ab-f449bdc34bae\") " pod="openstack/keystone-cron-29325001-vhzr2"
Oct 03 14:01:00 crc kubenswrapper[4861]: I1003 14:01:00.428579 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6h6kh\" (UniqueName: \"kubernetes.io/projected/eb217f97-9f13-4711-b3ab-f449bdc34bae-kube-api-access-6h6kh\") pod \"keystone-cron-29325001-vhzr2\" (UID: \"eb217f97-9f13-4711-b3ab-f449bdc34bae\") " pod="openstack/keystone-cron-29325001-vhzr2"
Oct 03 14:01:00 crc kubenswrapper[4861]: I1003 14:01:00.475353 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29325001-vhzr2"
Oct 03 14:01:00 crc kubenswrapper[4861]: I1003 14:01:00.692905 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a7415066-e954-4d19-9167-1a795f87add7" path="/var/lib/kubelet/pods/a7415066-e954-4d19-9167-1a795f87add7/volumes"
Oct 03 14:01:00 crc kubenswrapper[4861]: I1003 14:01:00.917026 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29325001-vhzr2"]
Oct 03 14:01:00 crc kubenswrapper[4861]: W1003 14:01:00.923197 4861 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podeb217f97_9f13_4711_b3ab_f449bdc34bae.slice/crio-ccd7f872923c979c472a3d32987c189248a1f12ff59a1e24aff34897ed4f114f WatchSource:0}: Error finding container ccd7f872923c979c472a3d32987c189248a1f12ff59a1e24aff34897ed4f114f: Status 404 returned error can't find the container with id ccd7f872923c979c472a3d32987c189248a1f12ff59a1e24aff34897ed4f114f
Oct 03 14:01:01 crc kubenswrapper[4861]: I1003 14:01:01.014801 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29325001-vhzr2" event={"ID":"eb217f97-9f13-4711-b3ab-f449bdc34bae","Type":"ContainerStarted","Data":"ccd7f872923c979c472a3d32987c189248a1f12ff59a1e24aff34897ed4f114f"}
Oct 03 14:01:02 crc kubenswrapper[4861]: I1003 14:01:02.023159 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29325001-vhzr2" event={"ID":"eb217f97-9f13-4711-b3ab-f449bdc34bae","Type":"ContainerStarted","Data":"46c2da44c2cf76d2fa017bc5885078af7d82885a05522f56ba914b65aa22c0a9"}
Oct 03 14:01:02 crc kubenswrapper[4861]: I1003 14:01:02.041614 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-cron-29325001-vhzr2" podStartSLOduration=2.041596544 podStartE2EDuration="2.041596544s" podCreationTimestamp="2025-10-03 14:01:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 14:01:02.037292968 +0000 UTC m=+1776.035278035" watchObservedRunningTime="2025-10-03 14:01:02.041596544 +0000 UTC m=+1776.039581591"
Oct 03 14:01:05 crc kubenswrapper[4861]: I1003 14:01:05.054165 4861 generic.go:334] "Generic (PLEG): container finished" podID="eb217f97-9f13-4711-b3ab-f449bdc34bae" containerID="46c2da44c2cf76d2fa017bc5885078af7d82885a05522f56ba914b65aa22c0a9" exitCode=0
Oct 03 14:01:05 crc kubenswrapper[4861]: I1003 14:01:05.054205 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29325001-vhzr2" event={"ID":"eb217f97-9f13-4711-b3ab-f449bdc34bae","Type":"ContainerDied","Data":"46c2da44c2cf76d2fa017bc5885078af7d82885a05522f56ba914b65aa22c0a9"}
Oct 03 14:01:06 crc kubenswrapper[4861]: I1003 14:01:06.397983 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29325001-vhzr2"
Oct 03 14:01:06 crc kubenswrapper[4861]: I1003 14:01:06.517309 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eb217f97-9f13-4711-b3ab-f449bdc34bae-config-data\") pod \"eb217f97-9f13-4711-b3ab-f449bdc34bae\" (UID: \"eb217f97-9f13-4711-b3ab-f449bdc34bae\") "
Oct 03 14:01:06 crc kubenswrapper[4861]: I1003 14:01:06.517360 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6h6kh\" (UniqueName: \"kubernetes.io/projected/eb217f97-9f13-4711-b3ab-f449bdc34bae-kube-api-access-6h6kh\") pod \"eb217f97-9f13-4711-b3ab-f449bdc34bae\" (UID: \"eb217f97-9f13-4711-b3ab-f449bdc34bae\") "
Oct 03 14:01:06 crc kubenswrapper[4861]: I1003 14:01:06.517498 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/eb217f97-9f13-4711-b3ab-f449bdc34bae-fernet-keys\") pod \"eb217f97-9f13-4711-b3ab-f449bdc34bae\" (UID: \"eb217f97-9f13-4711-b3ab-f449bdc34bae\") "
Oct 03 14:01:06 crc kubenswrapper[4861]: I1003 14:01:06.517552 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eb217f97-9f13-4711-b3ab-f449bdc34bae-combined-ca-bundle\") pod \"eb217f97-9f13-4711-b3ab-f449bdc34bae\" (UID: \"eb217f97-9f13-4711-b3ab-f449bdc34bae\") "
Oct 03 14:01:06 crc kubenswrapper[4861]: I1003 14:01:06.523581 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eb217f97-9f13-4711-b3ab-f449bdc34bae-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "eb217f97-9f13-4711-b3ab-f449bdc34bae" (UID: "eb217f97-9f13-4711-b3ab-f449bdc34bae"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 14:01:06 crc kubenswrapper[4861]: I1003 14:01:06.536448 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eb217f97-9f13-4711-b3ab-f449bdc34bae-kube-api-access-6h6kh" (OuterVolumeSpecName: "kube-api-access-6h6kh") pod "eb217f97-9f13-4711-b3ab-f449bdc34bae" (UID: "eb217f97-9f13-4711-b3ab-f449bdc34bae"). InnerVolumeSpecName "kube-api-access-6h6kh". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 14:01:06 crc kubenswrapper[4861]: I1003 14:01:06.549733 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eb217f97-9f13-4711-b3ab-f449bdc34bae-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "eb217f97-9f13-4711-b3ab-f449bdc34bae" (UID: "eb217f97-9f13-4711-b3ab-f449bdc34bae"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 14:01:06 crc kubenswrapper[4861]: I1003 14:01:06.576716 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eb217f97-9f13-4711-b3ab-f449bdc34bae-config-data" (OuterVolumeSpecName: "config-data") pod "eb217f97-9f13-4711-b3ab-f449bdc34bae" (UID: "eb217f97-9f13-4711-b3ab-f449bdc34bae"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 14:01:06 crc kubenswrapper[4861]: I1003 14:01:06.620378 4861 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/eb217f97-9f13-4711-b3ab-f449bdc34bae-fernet-keys\") on node \"crc\" DevicePath \"\""
Oct 03 14:01:06 crc kubenswrapper[4861]: I1003 14:01:06.621370 4861 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eb217f97-9f13-4711-b3ab-f449bdc34bae-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 03 14:01:06 crc kubenswrapper[4861]: I1003 14:01:06.621420 4861 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eb217f97-9f13-4711-b3ab-f449bdc34bae-config-data\") on node \"crc\" DevicePath \"\""
Oct 03 14:01:06 crc kubenswrapper[4861]: I1003 14:01:06.621437 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6h6kh\" (UniqueName: \"kubernetes.io/projected/eb217f97-9f13-4711-b3ab-f449bdc34bae-kube-api-access-6h6kh\") on node \"crc\" DevicePath \"\""
Oct 03 14:01:07 crc kubenswrapper[4861]: I1003 14:01:07.074082 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29325001-vhzr2" event={"ID":"eb217f97-9f13-4711-b3ab-f449bdc34bae","Type":"ContainerDied","Data":"ccd7f872923c979c472a3d32987c189248a1f12ff59a1e24aff34897ed4f114f"}
Oct 03 14:01:07 crc kubenswrapper[4861]: I1003 14:01:07.074353 4861 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ccd7f872923c979c472a3d32987c189248a1f12ff59a1e24aff34897ed4f114f"
Oct 03 14:01:07 crc kubenswrapper[4861]: I1003 14:01:07.074142 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29325001-vhzr2"
Oct 03 14:01:11 crc kubenswrapper[4861]: I1003 14:01:11.681683 4861 scope.go:117] "RemoveContainer" containerID="c1e256e5753a41524e35adb39d95dd19fa1e46e14c50fc9dfd4aef05090aab1c"
Oct 03 14:01:11 crc kubenswrapper[4861]: E1003 14:01:11.682271 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348"
Oct 03 14:01:15 crc kubenswrapper[4861]: I1003 14:01:15.033515 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-cmgtn"]
Oct 03 14:01:15 crc kubenswrapper[4861]: I1003 14:01:15.040594 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-f6j5h"]
Oct 03 14:01:15 crc kubenswrapper[4861]: I1003 14:01:15.049915 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-cmgtn"]
Oct 03 14:01:15 crc kubenswrapper[4861]: I1003 14:01:15.060462 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-f6j5h"]
Oct 03 14:01:16 crc kubenswrapper[4861]: I1003 14:01:16.717154 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7b148d26-3aac-44de-9776-c03b03c5fff2" path="/var/lib/kubelet/pods/7b148d26-3aac-44de-9776-c03b03c5fff2/volumes"
Oct 03 14:01:16 crc kubenswrapper[4861]: I1003 14:01:16.718104 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dd37928f-4e28-4ff6-b3bf-5baa2941c432" path="/var/lib/kubelet/pods/dd37928f-4e28-4ff6-b3bf-5baa2941c432/volumes"
Oct 03 14:01:22 crc kubenswrapper[4861]: I1003 14:01:22.689063 4861 scope.go:117] "RemoveContainer" containerID="c1e256e5753a41524e35adb39d95dd19fa1e46e14c50fc9dfd4aef05090aab1c"
Oct 03 14:01:22 crc kubenswrapper[4861]: E1003 14:01:22.690088 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348"
Oct 03 14:01:26 crc kubenswrapper[4861]: I1003 14:01:26.080723 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-h4wcn"]
Oct 03 14:01:26 crc kubenswrapper[4861]: I1003 14:01:26.097818 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-h4wcn"]
Oct 03 14:01:26 crc kubenswrapper[4861]: I1003 14:01:26.696170 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3b0f621f-bdf0-4768-a764-0bc15e01faba" path="/var/lib/kubelet/pods/3b0f621f-bdf0-4768-a764-0bc15e01faba/volumes"
Oct 03 14:01:28 crc kubenswrapper[4861]: I1003 14:01:28.044979 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-ksvcg"]
Oct 03 14:01:28 crc kubenswrapper[4861]: I1003 14:01:28.058780 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-ksvcg"]
Oct 03 14:01:28 crc kubenswrapper[4861]: I1003 14:01:28.703223 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="456069ef-db45-4878-85d5-1e5001fa789e" path="/var/lib/kubelet/pods/456069ef-db45-4878-85d5-1e5001fa789e/volumes"
Oct 03 14:01:37 crc kubenswrapper[4861]: I1003 14:01:37.680993 4861 scope.go:117] "RemoveContainer" containerID="c1e256e5753a41524e35adb39d95dd19fa1e46e14c50fc9dfd4aef05090aab1c"
Oct 03 14:01:37 crc kubenswrapper[4861]: E1003 14:01:37.681786 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348"
Oct 03 14:01:49 crc kubenswrapper[4861]: I1003 14:01:49.682003 4861 scope.go:117] "RemoveContainer" containerID="c1e256e5753a41524e35adb39d95dd19fa1e46e14c50fc9dfd4aef05090aab1c"
Oct 03 14:01:49 crc kubenswrapper[4861]: E1003 14:01:49.682914 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348"
Oct 03 14:01:57 crc kubenswrapper[4861]: I1003 14:01:57.376712 4861 scope.go:117] "RemoveContainer" containerID="18445312989afd8d0cf13f473f3e53bf8daf394310804e65cbb55ceef7c0116e"
Oct 03 14:01:57 crc kubenswrapper[4861]: I1003 14:01:57.428525 4861 scope.go:117] "RemoveContainer" containerID="cbeb3106bef3beac18cf97956e4fc960ae1f5b838716bc6b7faee440906ea050"
Oct 03 14:01:57 crc kubenswrapper[4861]: I1003 14:01:57.468306 4861 scope.go:117] "RemoveContainer" containerID="0c38c0749c2fee8296c898b93d4f827db7e259dfb4be403af990d096498787d6"
Oct 03 14:01:57 crc kubenswrapper[4861]: I1003 14:01:57.533793 4861 scope.go:117] "RemoveContainer" containerID="4c4adbc9f6f31d3c37ae6cbde5ed1a833e9d9b61cb14a154f12f609ed560cbfd"
Oct 03 14:01:57 crc kubenswrapper[4861]: I1003 14:01:57.580116 4861 scope.go:117] "RemoveContainer" containerID="ebc96b17736f951d05b7af82a83fb68c57336d48d0621a313b8ce7a6beefa295"
Oct 03 14:02:00 crc kubenswrapper[4861]: I1003 14:02:00.681468 4861 scope.go:117] "RemoveContainer" containerID="c1e256e5753a41524e35adb39d95dd19fa1e46e14c50fc9dfd4aef05090aab1c"
Oct 03 14:02:01 crc kubenswrapper[4861]: I1003 14:02:01.611635 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" event={"ID":"d8335d3f-417e-4114-b306-a3d8f6c31348","Type":"ContainerStarted","Data":"f064e66ff80057718d5d7200a474f2f2e7de2018d0db330fa44860780399accf"}
Oct 03 14:02:03 crc kubenswrapper[4861]: I1003 14:02:03.634872 4861 generic.go:334] "Generic (PLEG): container finished" podID="e5903a2f-8943-4fab-8ddf-6ec1b8329590" containerID="c5a9934ce4c5278e2dac6a0c11d0f3a009b91926cf92851bb16139b939dbd452" exitCode=0
Oct 03 14:02:03 crc kubenswrapper[4861]: I1003 14:02:03.635414 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-8tkzn" event={"ID":"e5903a2f-8943-4fab-8ddf-6ec1b8329590","Type":"ContainerDied","Data":"c5a9934ce4c5278e2dac6a0c11d0f3a009b91926cf92851bb16139b939dbd452"}
Oct 03 14:02:05 crc kubenswrapper[4861]: I1003 14:02:05.110778 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-8tkzn"
Oct 03 14:02:05 crc kubenswrapper[4861]: I1003 14:02:05.198607 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7ghmp\" (UniqueName: \"kubernetes.io/projected/e5903a2f-8943-4fab-8ddf-6ec1b8329590-kube-api-access-7ghmp\") pod \"e5903a2f-8943-4fab-8ddf-6ec1b8329590\" (UID: \"e5903a2f-8943-4fab-8ddf-6ec1b8329590\") "
Oct 03 14:02:05 crc kubenswrapper[4861]: I1003 14:02:05.198846 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e5903a2f-8943-4fab-8ddf-6ec1b8329590-ssh-key\") pod \"e5903a2f-8943-4fab-8ddf-6ec1b8329590\" (UID: \"e5903a2f-8943-4fab-8ddf-6ec1b8329590\") "
Oct 03 14:02:05 crc kubenswrapper[4861]: I1003 14:02:05.198901 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e5903a2f-8943-4fab-8ddf-6ec1b8329590-inventory\") pod \"e5903a2f-8943-4fab-8ddf-6ec1b8329590\" (UID: \"e5903a2f-8943-4fab-8ddf-6ec1b8329590\") "
Oct 03 14:02:05 crc kubenswrapper[4861]: I1003 14:02:05.205701 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e5903a2f-8943-4fab-8ddf-6ec1b8329590-kube-api-access-7ghmp" (OuterVolumeSpecName: "kube-api-access-7ghmp") pod "e5903a2f-8943-4fab-8ddf-6ec1b8329590" (UID: "e5903a2f-8943-4fab-8ddf-6ec1b8329590"). InnerVolumeSpecName "kube-api-access-7ghmp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 14:02:05 crc kubenswrapper[4861]: I1003 14:02:05.243911 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e5903a2f-8943-4fab-8ddf-6ec1b8329590-inventory" (OuterVolumeSpecName: "inventory") pod "e5903a2f-8943-4fab-8ddf-6ec1b8329590" (UID: "e5903a2f-8943-4fab-8ddf-6ec1b8329590"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 14:02:05 crc kubenswrapper[4861]: I1003 14:02:05.263508 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e5903a2f-8943-4fab-8ddf-6ec1b8329590-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "e5903a2f-8943-4fab-8ddf-6ec1b8329590" (UID: "e5903a2f-8943-4fab-8ddf-6ec1b8329590"). InnerVolumeSpecName "ssh-key".
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 14:02:05 crc kubenswrapper[4861]: I1003 14:02:05.300607 4861 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e5903a2f-8943-4fab-8ddf-6ec1b8329590-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 03 14:02:05 crc kubenswrapper[4861]: I1003 14:02:05.300635 4861 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e5903a2f-8943-4fab-8ddf-6ec1b8329590-inventory\") on node \"crc\" DevicePath \"\"" Oct 03 14:02:05 crc kubenswrapper[4861]: I1003 14:02:05.300645 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7ghmp\" (UniqueName: \"kubernetes.io/projected/e5903a2f-8943-4fab-8ddf-6ec1b8329590-kube-api-access-7ghmp\") on node \"crc\" DevicePath \"\"" Oct 03 14:02:05 crc kubenswrapper[4861]: I1003 14:02:05.662888 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-8tkzn" event={"ID":"e5903a2f-8943-4fab-8ddf-6ec1b8329590","Type":"ContainerDied","Data":"14624d72fb32632780d7d3b80344ac32a491bd046fe74f10875df835d1f38d5f"} Oct 03 14:02:05 crc kubenswrapper[4861]: I1003 14:02:05.662947 4861 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="14624d72fb32632780d7d3b80344ac32a491bd046fe74f10875df835d1f38d5f" Oct 03 14:02:05 crc kubenswrapper[4861]: I1003 14:02:05.663013 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-8tkzn" Oct 03 14:02:05 crc kubenswrapper[4861]: I1003 14:02:05.801200 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-2p8f9"] Oct 03 14:02:05 crc kubenswrapper[4861]: E1003 14:02:05.801956 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e5903a2f-8943-4fab-8ddf-6ec1b8329590" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Oct 03 14:02:05 crc kubenswrapper[4861]: I1003 14:02:05.801978 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="e5903a2f-8943-4fab-8ddf-6ec1b8329590" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Oct 03 14:02:05 crc kubenswrapper[4861]: E1003 14:02:05.801994 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eb217f97-9f13-4711-b3ab-f449bdc34bae" containerName="keystone-cron" Oct 03 14:02:05 crc kubenswrapper[4861]: I1003 14:02:05.802003 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="eb217f97-9f13-4711-b3ab-f449bdc34bae" containerName="keystone-cron" Oct 03 14:02:05 crc kubenswrapper[4861]: I1003 14:02:05.802282 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="eb217f97-9f13-4711-b3ab-f449bdc34bae" containerName="keystone-cron" Oct 03 14:02:05 crc kubenswrapper[4861]: I1003 14:02:05.802319 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="e5903a2f-8943-4fab-8ddf-6ec1b8329590" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Oct 03 14:02:05 crc kubenswrapper[4861]: I1003 14:02:05.803036 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-2p8f9" Oct 03 14:02:05 crc kubenswrapper[4861]: I1003 14:02:05.809621 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 03 14:02:05 crc kubenswrapper[4861]: I1003 14:02:05.809712 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-bd7xg" Oct 03 14:02:05 crc kubenswrapper[4861]: I1003 14:02:05.809990 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 03 14:02:05 crc kubenswrapper[4861]: I1003 14:02:05.811018 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 03 14:02:05 crc kubenswrapper[4861]: I1003 14:02:05.825666 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-2p8f9"] Oct 03 14:02:05 crc kubenswrapper[4861]: I1003 14:02:05.913596 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rjmn5\" (UniqueName: \"kubernetes.io/projected/15a74413-2b7d-42e1-9b05-e50d739dfd39-kube-api-access-rjmn5\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-2p8f9\" (UID: \"15a74413-2b7d-42e1-9b05-e50d739dfd39\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-2p8f9" Oct 03 14:02:05 crc kubenswrapper[4861]: I1003 14:02:05.913654 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/15a74413-2b7d-42e1-9b05-e50d739dfd39-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-2p8f9\" (UID: \"15a74413-2b7d-42e1-9b05-e50d739dfd39\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-2p8f9" Oct 03 14:02:05 crc kubenswrapper[4861]: I1003 14:02:05.913878 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/15a74413-2b7d-42e1-9b05-e50d739dfd39-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-2p8f9\" (UID: \"15a74413-2b7d-42e1-9b05-e50d739dfd39\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-2p8f9" Oct 03 14:02:06 crc kubenswrapper[4861]: I1003 14:02:06.015297 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/15a74413-2b7d-42e1-9b05-e50d739dfd39-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-2p8f9\" (UID: \"15a74413-2b7d-42e1-9b05-e50d739dfd39\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-2p8f9" Oct 03 14:02:06 crc kubenswrapper[4861]: I1003 14:02:06.015578 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/15a74413-2b7d-42e1-9b05-e50d739dfd39-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-2p8f9\" (UID: \"15a74413-2b7d-42e1-9b05-e50d739dfd39\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-2p8f9" Oct 03 14:02:06 crc kubenswrapper[4861]: I1003 14:02:06.015725 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rjmn5\" (UniqueName: \"kubernetes.io/projected/15a74413-2b7d-42e1-9b05-e50d739dfd39-kube-api-access-rjmn5\") 
pod \"configure-network-edpm-deployment-openstack-edpm-ipam-2p8f9\" (UID: \"15a74413-2b7d-42e1-9b05-e50d739dfd39\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-2p8f9" Oct 03 14:02:06 crc kubenswrapper[4861]: I1003 14:02:06.023944 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/15a74413-2b7d-42e1-9b05-e50d739dfd39-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-2p8f9\" (UID: \"15a74413-2b7d-42e1-9b05-e50d739dfd39\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-2p8f9" Oct 03 14:02:06 crc kubenswrapper[4861]: I1003 14:02:06.024713 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/15a74413-2b7d-42e1-9b05-e50d739dfd39-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-2p8f9\" (UID: \"15a74413-2b7d-42e1-9b05-e50d739dfd39\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-2p8f9" Oct 03 14:02:06 crc kubenswrapper[4861]: I1003 14:02:06.051385 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rjmn5\" (UniqueName: \"kubernetes.io/projected/15a74413-2b7d-42e1-9b05-e50d739dfd39-kube-api-access-rjmn5\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-2p8f9\" (UID: \"15a74413-2b7d-42e1-9b05-e50d739dfd39\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-2p8f9" Oct 03 14:02:06 crc kubenswrapper[4861]: I1003 14:02:06.123488 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-2p8f9" Oct 03 14:02:07 crc kubenswrapper[4861]: I1003 14:02:07.512789 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-2p8f9"] Oct 03 14:02:07 crc kubenswrapper[4861]: I1003 14:02:07.680150 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-2p8f9" event={"ID":"15a74413-2b7d-42e1-9b05-e50d739dfd39","Type":"ContainerStarted","Data":"27bb3da697ad91dd53747e3a64cbd1355b4972a9787587b7430ece0755dcf05a"} Oct 03 14:02:14 crc kubenswrapper[4861]: I1003 14:02:14.049267 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-mvb4f"] Oct 03 14:02:14 crc kubenswrapper[4861]: I1003 14:02:14.061870 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-vwnhz"] Oct 03 14:02:14 crc kubenswrapper[4861]: I1003 14:02:14.072558 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-nrf7f"] Oct 03 14:02:14 crc kubenswrapper[4861]: I1003 14:02:14.080420 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-mvb4f"] Oct 03 14:02:14 crc kubenswrapper[4861]: I1003 14:02:14.087606 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-vwnhz"] Oct 03 14:02:14 crc kubenswrapper[4861]: I1003 14:02:14.094888 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-nrf7f"] Oct 03 14:02:14 crc kubenswrapper[4861]: I1003 14:02:14.698888 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="741f5e11-d2d0-405c-9826-929d9b2b072b" path="/var/lib/kubelet/pods/741f5e11-d2d0-405c-9826-929d9b2b072b/volumes" Oct 03 14:02:14 crc kubenswrapper[4861]: I1003 14:02:14.700685 4861 
kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b99e855f-4dbf-498a-b942-e22c86d392d2" path="/var/lib/kubelet/pods/b99e855f-4dbf-498a-b942-e22c86d392d2/volumes" Oct 03 14:02:14 crc kubenswrapper[4861]: I1003 14:02:14.702448 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="de3aa019-3497-44f8-a749-0f04313edd2a" path="/var/lib/kubelet/pods/de3aa019-3497-44f8-a749-0f04313edd2a/volumes" Oct 03 14:02:16 crc kubenswrapper[4861]: I1003 14:02:16.782539 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-2p8f9" event={"ID":"15a74413-2b7d-42e1-9b05-e50d739dfd39","Type":"ContainerStarted","Data":"fc8ad036d85accc5acc90658c25107dbe6008850ba41090c1ff833649328e814"} Oct 03 14:02:16 crc kubenswrapper[4861]: I1003 14:02:16.815512 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-2p8f9" podStartSLOduration=3.314350299 podStartE2EDuration="11.815494545s" podCreationTimestamp="2025-10-03 14:02:05 +0000 UTC" firstStartedPulling="2025-10-03 14:02:07.520053481 +0000 UTC m=+1841.518038528" lastFinishedPulling="2025-10-03 14:02:16.021197717 +0000 UTC m=+1850.019182774" observedRunningTime="2025-10-03 14:02:16.806044432 +0000 UTC m=+1850.804029469" watchObservedRunningTime="2025-10-03 14:02:16.815494545 +0000 UTC m=+1850.813479592" Oct 03 14:02:29 crc kubenswrapper[4861]: I1003 14:02:29.081040 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-67b7-account-create-fmlf7"] Oct 03 14:02:29 crc kubenswrapper[4861]: I1003 14:02:29.093839 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-1189-account-create-9r84n"] Oct 03 14:02:29 crc kubenswrapper[4861]: I1003 14:02:29.103173 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-67b7-account-create-fmlf7"] Oct 03 14:02:29 crc kubenswrapper[4861]: I1003 14:02:29.135012 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-9a1b-account-create-6vwmh"] Oct 03 14:02:29 crc kubenswrapper[4861]: I1003 14:02:29.146099 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-1189-account-create-9r84n"] Oct 03 14:02:29 crc kubenswrapper[4861]: I1003 14:02:29.154249 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-9a1b-account-create-6vwmh"] Oct 03 14:02:30 crc kubenswrapper[4861]: I1003 14:02:30.698092 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="12d3cd9b-e5e3-4b38-a84b-af709f55101d" path="/var/lib/kubelet/pods/12d3cd9b-e5e3-4b38-a84b-af709f55101d/volumes" Oct 03 14:02:30 crc kubenswrapper[4861]: I1003 14:02:30.699106 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="14b33ce2-2360-44ed-93ac-3ccfbeeceab9" path="/var/lib/kubelet/pods/14b33ce2-2360-44ed-93ac-3ccfbeeceab9/volumes" Oct 03 14:02:30 crc kubenswrapper[4861]: I1003 14:02:30.699771 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7d594b62-d7f8-4dde-b366-b5389e6315e6" path="/var/lib/kubelet/pods/7d594b62-d7f8-4dde-b366-b5389e6315e6/volumes" Oct 03 14:02:57 crc kubenswrapper[4861]: I1003 14:02:57.730345 4861 scope.go:117] "RemoveContainer" containerID="c02ec4cb324a8795a5afed12c060013264238460dae4c58492407110929bde31" Oct 03 14:02:57 crc kubenswrapper[4861]: I1003 14:02:57.755059 4861 scope.go:117] "RemoveContainer" 
containerID="8dadb41dbb7cd3cc1c5020677b6b5198eef1163fe42c0c22da9d09335851ca55" Oct 03 14:02:57 crc kubenswrapper[4861]: I1003 14:02:57.824728 4861 scope.go:117] "RemoveContainer" containerID="07c2af27ff886eadebdfa623c717fae4a04ebd63f2adadb301c39c3b5f1a0cc5" Oct 03 14:02:57 crc kubenswrapper[4861]: I1003 14:02:57.857252 4861 scope.go:117] "RemoveContainer" containerID="b63d596611496b1bf60a09cd0a92a97938c3fb47048907a23288fd26c7ef2972" Oct 03 14:02:57 crc kubenswrapper[4861]: I1003 14:02:57.905578 4861 scope.go:117] "RemoveContainer" containerID="7dfa1e5dcb91520110a5b70f91b06059bef2e15ab26dd659ecb77af53e0f9f15" Oct 03 14:02:57 crc kubenswrapper[4861]: I1003 14:02:57.951503 4861 scope.go:117] "RemoveContainer" containerID="c7fc00420bbae860764b323536b50bfa4646e6aedf95a89a85b391bdc63a4ec5" Oct 03 14:02:57 crc kubenswrapper[4861]: I1003 14:02:57.992186 4861 scope.go:117] "RemoveContainer" containerID="d097e3d70c620a3a71c9105e49d81f0f01983fda9593d971db74be64f45c0bd8" Oct 03 14:02:58 crc kubenswrapper[4861]: I1003 14:02:58.034855 4861 scope.go:117] "RemoveContainer" containerID="95c367fdf3ae845cef2e8a6513d80d93563ab8024ec7937ca3119cdcf0852f4c" Oct 03 14:02:58 crc kubenswrapper[4861]: I1003 14:02:58.064409 4861 scope.go:117] "RemoveContainer" containerID="56a9a18bc568ddb1e6cac074a993e40e90955d696853240f40c9a7c93d0aadf9" Oct 03 14:03:00 crc kubenswrapper[4861]: I1003 14:03:00.062871 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-wk4nw"] Oct 03 14:03:00 crc kubenswrapper[4861]: I1003 14:03:00.071798 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-wk4nw"] Oct 03 14:03:00 crc kubenswrapper[4861]: I1003 14:03:00.691951 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a88488b7-b658-4fc0-8d27-7f69eb12d4c4" path="/var/lib/kubelet/pods/a88488b7-b658-4fc0-8d27-7f69eb12d4c4/volumes" Oct 03 14:03:26 crc kubenswrapper[4861]: I1003 14:03:26.063772 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-ngd8x"] Oct 03 14:03:26 crc kubenswrapper[4861]: I1003 14:03:26.090504 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-ngd8x"] Oct 03 14:03:26 crc kubenswrapper[4861]: I1003 14:03:26.697589 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0acf7c1b-380a-44b8-8542-6610bbc1f700" path="/var/lib/kubelet/pods/0acf7c1b-380a-44b8-8542-6610bbc1f700/volumes" Oct 03 14:03:27 crc kubenswrapper[4861]: I1003 14:03:27.032669 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-hgvnj"] Oct 03 14:03:27 crc kubenswrapper[4861]: I1003 14:03:27.040274 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-hgvnj"] Oct 03 14:03:28 crc kubenswrapper[4861]: I1003 14:03:28.701890 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f57ff497-fc3f-4872-b1e3-3927895d7c6c" path="/var/lib/kubelet/pods/f57ff497-fc3f-4872-b1e3-3927895d7c6c/volumes" Oct 03 14:03:36 crc kubenswrapper[4861]: I1003 14:03:36.539722 4861 generic.go:334] "Generic (PLEG): container finished" podID="15a74413-2b7d-42e1-9b05-e50d739dfd39" containerID="fc8ad036d85accc5acc90658c25107dbe6008850ba41090c1ff833649328e814" exitCode=0 Oct 03 14:03:36 crc kubenswrapper[4861]: I1003 14:03:36.539812 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-2p8f9" 
event={"ID":"15a74413-2b7d-42e1-9b05-e50d739dfd39","Type":"ContainerDied","Data":"fc8ad036d85accc5acc90658c25107dbe6008850ba41090c1ff833649328e814"} Oct 03 14:03:37 crc kubenswrapper[4861]: I1003 14:03:37.963067 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-2p8f9" Oct 03 14:03:38 crc kubenswrapper[4861]: I1003 14:03:38.074038 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/15a74413-2b7d-42e1-9b05-e50d739dfd39-inventory\") pod \"15a74413-2b7d-42e1-9b05-e50d739dfd39\" (UID: \"15a74413-2b7d-42e1-9b05-e50d739dfd39\") " Oct 03 14:03:38 crc kubenswrapper[4861]: I1003 14:03:38.074101 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/15a74413-2b7d-42e1-9b05-e50d739dfd39-ssh-key\") pod \"15a74413-2b7d-42e1-9b05-e50d739dfd39\" (UID: \"15a74413-2b7d-42e1-9b05-e50d739dfd39\") " Oct 03 14:03:38 crc kubenswrapper[4861]: I1003 14:03:38.074131 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rjmn5\" (UniqueName: \"kubernetes.io/projected/15a74413-2b7d-42e1-9b05-e50d739dfd39-kube-api-access-rjmn5\") pod \"15a74413-2b7d-42e1-9b05-e50d739dfd39\" (UID: \"15a74413-2b7d-42e1-9b05-e50d739dfd39\") " Oct 03 14:03:38 crc kubenswrapper[4861]: I1003 14:03:38.080207 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/15a74413-2b7d-42e1-9b05-e50d739dfd39-kube-api-access-rjmn5" (OuterVolumeSpecName: "kube-api-access-rjmn5") pod "15a74413-2b7d-42e1-9b05-e50d739dfd39" (UID: "15a74413-2b7d-42e1-9b05-e50d739dfd39"). InnerVolumeSpecName "kube-api-access-rjmn5". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 14:03:38 crc kubenswrapper[4861]: I1003 14:03:38.108740 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/15a74413-2b7d-42e1-9b05-e50d739dfd39-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "15a74413-2b7d-42e1-9b05-e50d739dfd39" (UID: "15a74413-2b7d-42e1-9b05-e50d739dfd39"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 14:03:38 crc kubenswrapper[4861]: I1003 14:03:38.136455 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/15a74413-2b7d-42e1-9b05-e50d739dfd39-inventory" (OuterVolumeSpecName: "inventory") pod "15a74413-2b7d-42e1-9b05-e50d739dfd39" (UID: "15a74413-2b7d-42e1-9b05-e50d739dfd39"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 14:03:38 crc kubenswrapper[4861]: I1003 14:03:38.176315 4861 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/15a74413-2b7d-42e1-9b05-e50d739dfd39-inventory\") on node \"crc\" DevicePath \"\"" Oct 03 14:03:38 crc kubenswrapper[4861]: I1003 14:03:38.176351 4861 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/15a74413-2b7d-42e1-9b05-e50d739dfd39-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 03 14:03:38 crc kubenswrapper[4861]: I1003 14:03:38.176360 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rjmn5\" (UniqueName: \"kubernetes.io/projected/15a74413-2b7d-42e1-9b05-e50d739dfd39-kube-api-access-rjmn5\") on node \"crc\" DevicePath \"\"" Oct 03 14:03:38 crc kubenswrapper[4861]: I1003 14:03:38.565496 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-2p8f9" event={"ID":"15a74413-2b7d-42e1-9b05-e50d739dfd39","Type":"ContainerDied","Data":"27bb3da697ad91dd53747e3a64cbd1355b4972a9787587b7430ece0755dcf05a"} Oct 03 14:03:38 crc kubenswrapper[4861]: I1003 14:03:38.565861 4861 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="27bb3da697ad91dd53747e3a64cbd1355b4972a9787587b7430ece0755dcf05a" Oct 03 14:03:38 crc kubenswrapper[4861]: I1003 14:03:38.565575 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-2p8f9" Oct 03 14:03:38 crc kubenswrapper[4861]: I1003 14:03:38.695628 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jk5jz"] Oct 03 14:03:38 crc kubenswrapper[4861]: E1003 14:03:38.695988 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="15a74413-2b7d-42e1-9b05-e50d739dfd39" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Oct 03 14:03:38 crc kubenswrapper[4861]: I1003 14:03:38.696008 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="15a74413-2b7d-42e1-9b05-e50d739dfd39" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Oct 03 14:03:38 crc kubenswrapper[4861]: I1003 14:03:38.696275 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="15a74413-2b7d-42e1-9b05-e50d739dfd39" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Oct 03 14:03:38 crc kubenswrapper[4861]: I1003 14:03:38.697111 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jk5jz" Oct 03 14:03:38 crc kubenswrapper[4861]: I1003 14:03:38.699613 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 03 14:03:38 crc kubenswrapper[4861]: I1003 14:03:38.699942 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 03 14:03:38 crc kubenswrapper[4861]: I1003 14:03:38.704079 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 03 14:03:38 crc kubenswrapper[4861]: I1003 14:03:38.704411 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-bd7xg" Oct 03 14:03:38 crc kubenswrapper[4861]: I1003 14:03:38.705748 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jk5jz"] Oct 03 14:03:38 crc kubenswrapper[4861]: I1003 14:03:38.788967 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c87ad9ae-123b-4fb0-a1cc-2cc0ffd85ad3-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-jk5jz\" (UID: \"c87ad9ae-123b-4fb0-a1cc-2cc0ffd85ad3\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jk5jz" Oct 03 14:03:38 crc kubenswrapper[4861]: I1003 14:03:38.789058 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dzrkx\" (UniqueName: \"kubernetes.io/projected/c87ad9ae-123b-4fb0-a1cc-2cc0ffd85ad3-kube-api-access-dzrkx\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-jk5jz\" (UID: \"c87ad9ae-123b-4fb0-a1cc-2cc0ffd85ad3\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jk5jz" Oct 03 14:03:38 crc kubenswrapper[4861]: I1003 14:03:38.789134 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c87ad9ae-123b-4fb0-a1cc-2cc0ffd85ad3-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-jk5jz\" (UID: \"c87ad9ae-123b-4fb0-a1cc-2cc0ffd85ad3\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jk5jz" Oct 03 14:03:38 crc kubenswrapper[4861]: I1003 14:03:38.891155 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c87ad9ae-123b-4fb0-a1cc-2cc0ffd85ad3-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-jk5jz\" (UID: \"c87ad9ae-123b-4fb0-a1cc-2cc0ffd85ad3\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jk5jz" Oct 03 14:03:38 crc kubenswrapper[4861]: I1003 14:03:38.891278 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dzrkx\" (UniqueName: \"kubernetes.io/projected/c87ad9ae-123b-4fb0-a1cc-2cc0ffd85ad3-kube-api-access-dzrkx\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-jk5jz\" (UID: \"c87ad9ae-123b-4fb0-a1cc-2cc0ffd85ad3\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jk5jz" Oct 03 14:03:38 crc kubenswrapper[4861]: I1003 14:03:38.891677 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c87ad9ae-123b-4fb0-a1cc-2cc0ffd85ad3-ssh-key\") pod 
\"validate-network-edpm-deployment-openstack-edpm-ipam-jk5jz\" (UID: \"c87ad9ae-123b-4fb0-a1cc-2cc0ffd85ad3\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jk5jz" Oct 03 14:03:38 crc kubenswrapper[4861]: I1003 14:03:38.898988 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c87ad9ae-123b-4fb0-a1cc-2cc0ffd85ad3-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-jk5jz\" (UID: \"c87ad9ae-123b-4fb0-a1cc-2cc0ffd85ad3\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jk5jz" Oct 03 14:03:38 crc kubenswrapper[4861]: I1003 14:03:38.903215 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c87ad9ae-123b-4fb0-a1cc-2cc0ffd85ad3-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-jk5jz\" (UID: \"c87ad9ae-123b-4fb0-a1cc-2cc0ffd85ad3\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jk5jz" Oct 03 14:03:38 crc kubenswrapper[4861]: I1003 14:03:38.916298 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dzrkx\" (UniqueName: \"kubernetes.io/projected/c87ad9ae-123b-4fb0-a1cc-2cc0ffd85ad3-kube-api-access-dzrkx\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-jk5jz\" (UID: \"c87ad9ae-123b-4fb0-a1cc-2cc0ffd85ad3\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jk5jz" Oct 03 14:03:39 crc kubenswrapper[4861]: I1003 14:03:39.039276 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jk5jz" Oct 03 14:03:39 crc kubenswrapper[4861]: I1003 14:03:39.622496 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jk5jz"] Oct 03 14:03:40 crc kubenswrapper[4861]: I1003 14:03:40.586716 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jk5jz" event={"ID":"c87ad9ae-123b-4fb0-a1cc-2cc0ffd85ad3","Type":"ContainerStarted","Data":"4f9a7fbedb29df58627ec710b0c7136a644cbc7ed4d2d5578f067212a7777e55"} Oct 03 14:03:40 crc kubenswrapper[4861]: I1003 14:03:40.587214 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jk5jz" event={"ID":"c87ad9ae-123b-4fb0-a1cc-2cc0ffd85ad3","Type":"ContainerStarted","Data":"bac53d51c26a72b8816085adb88e8c63b11c8183ffc6418e7d18f2527f82e3d7"} Oct 03 14:03:40 crc kubenswrapper[4861]: I1003 14:03:40.609364 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jk5jz" podStartSLOduration=2.419592048 podStartE2EDuration="2.609340627s" podCreationTimestamp="2025-10-03 14:03:38 +0000 UTC" firstStartedPulling="2025-10-03 14:03:39.626282794 +0000 UTC m=+1933.624267851" lastFinishedPulling="2025-10-03 14:03:39.816031383 +0000 UTC m=+1933.814016430" observedRunningTime="2025-10-03 14:03:40.602922905 +0000 UTC m=+1934.600907952" watchObservedRunningTime="2025-10-03 14:03:40.609340627 +0000 UTC m=+1934.607325694" Oct 03 14:03:45 crc kubenswrapper[4861]: I1003 14:03:45.640542 4861 generic.go:334] "Generic (PLEG): container finished" podID="c87ad9ae-123b-4fb0-a1cc-2cc0ffd85ad3" containerID="4f9a7fbedb29df58627ec710b0c7136a644cbc7ed4d2d5578f067212a7777e55" exitCode=0 Oct 03 14:03:45 crc kubenswrapper[4861]: I1003 
14:03:45.640675 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jk5jz" event={"ID":"c87ad9ae-123b-4fb0-a1cc-2cc0ffd85ad3","Type":"ContainerDied","Data":"4f9a7fbedb29df58627ec710b0c7136a644cbc7ed4d2d5578f067212a7777e55"} Oct 03 14:03:47 crc kubenswrapper[4861]: I1003 14:03:47.086524 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jk5jz" Oct 03 14:03:47 crc kubenswrapper[4861]: I1003 14:03:47.176083 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dzrkx\" (UniqueName: \"kubernetes.io/projected/c87ad9ae-123b-4fb0-a1cc-2cc0ffd85ad3-kube-api-access-dzrkx\") pod \"c87ad9ae-123b-4fb0-a1cc-2cc0ffd85ad3\" (UID: \"c87ad9ae-123b-4fb0-a1cc-2cc0ffd85ad3\") " Oct 03 14:03:47 crc kubenswrapper[4861]: I1003 14:03:47.176209 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c87ad9ae-123b-4fb0-a1cc-2cc0ffd85ad3-inventory\") pod \"c87ad9ae-123b-4fb0-a1cc-2cc0ffd85ad3\" (UID: \"c87ad9ae-123b-4fb0-a1cc-2cc0ffd85ad3\") " Oct 03 14:03:47 crc kubenswrapper[4861]: I1003 14:03:47.176463 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c87ad9ae-123b-4fb0-a1cc-2cc0ffd85ad3-ssh-key\") pod \"c87ad9ae-123b-4fb0-a1cc-2cc0ffd85ad3\" (UID: \"c87ad9ae-123b-4fb0-a1cc-2cc0ffd85ad3\") " Oct 03 14:03:47 crc kubenswrapper[4861]: I1003 14:03:47.181133 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c87ad9ae-123b-4fb0-a1cc-2cc0ffd85ad3-kube-api-access-dzrkx" (OuterVolumeSpecName: "kube-api-access-dzrkx") pod "c87ad9ae-123b-4fb0-a1cc-2cc0ffd85ad3" (UID: "c87ad9ae-123b-4fb0-a1cc-2cc0ffd85ad3"). InnerVolumeSpecName "kube-api-access-dzrkx". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 14:03:47 crc kubenswrapper[4861]: I1003 14:03:47.207867 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c87ad9ae-123b-4fb0-a1cc-2cc0ffd85ad3-inventory" (OuterVolumeSpecName: "inventory") pod "c87ad9ae-123b-4fb0-a1cc-2cc0ffd85ad3" (UID: "c87ad9ae-123b-4fb0-a1cc-2cc0ffd85ad3"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 14:03:47 crc kubenswrapper[4861]: I1003 14:03:47.208238 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c87ad9ae-123b-4fb0-a1cc-2cc0ffd85ad3-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "c87ad9ae-123b-4fb0-a1cc-2cc0ffd85ad3" (UID: "c87ad9ae-123b-4fb0-a1cc-2cc0ffd85ad3"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 14:03:47 crc kubenswrapper[4861]: I1003 14:03:47.279436 4861 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c87ad9ae-123b-4fb0-a1cc-2cc0ffd85ad3-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 03 14:03:47 crc kubenswrapper[4861]: I1003 14:03:47.279540 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dzrkx\" (UniqueName: \"kubernetes.io/projected/c87ad9ae-123b-4fb0-a1cc-2cc0ffd85ad3-kube-api-access-dzrkx\") on node \"crc\" DevicePath \"\"" Oct 03 14:03:47 crc kubenswrapper[4861]: I1003 14:03:47.279565 4861 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c87ad9ae-123b-4fb0-a1cc-2cc0ffd85ad3-inventory\") on node \"crc\" DevicePath \"\"" Oct 03 14:03:47 crc kubenswrapper[4861]: I1003 14:03:47.665905 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jk5jz" event={"ID":"c87ad9ae-123b-4fb0-a1cc-2cc0ffd85ad3","Type":"ContainerDied","Data":"bac53d51c26a72b8816085adb88e8c63b11c8183ffc6418e7d18f2527f82e3d7"} Oct 03 14:03:47 crc kubenswrapper[4861]: I1003 14:03:47.665975 4861 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bac53d51c26a72b8816085adb88e8c63b11c8183ffc6418e7d18f2527f82e3d7" Oct 03 14:03:47 crc kubenswrapper[4861]: I1003 14:03:47.666477 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jk5jz" Oct 03 14:03:47 crc kubenswrapper[4861]: I1003 14:03:47.775300 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-hk68t"] Oct 03 14:03:47 crc kubenswrapper[4861]: E1003 14:03:47.775944 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c87ad9ae-123b-4fb0-a1cc-2cc0ffd85ad3" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Oct 03 14:03:47 crc kubenswrapper[4861]: I1003 14:03:47.775977 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="c87ad9ae-123b-4fb0-a1cc-2cc0ffd85ad3" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Oct 03 14:03:47 crc kubenswrapper[4861]: I1003 14:03:47.776303 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="c87ad9ae-123b-4fb0-a1cc-2cc0ffd85ad3" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Oct 03 14:03:47 crc kubenswrapper[4861]: I1003 14:03:47.777357 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-hk68t" Oct 03 14:03:47 crc kubenswrapper[4861]: I1003 14:03:47.779874 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 03 14:03:47 crc kubenswrapper[4861]: I1003 14:03:47.780766 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-bd7xg" Oct 03 14:03:47 crc kubenswrapper[4861]: I1003 14:03:47.780907 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 03 14:03:47 crc kubenswrapper[4861]: I1003 14:03:47.781021 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 03 14:03:47 crc kubenswrapper[4861]: I1003 14:03:47.785450 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-hk68t"] Oct 03 14:03:47 crc kubenswrapper[4861]: I1003 14:03:47.790509 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fd61a6d8-4264-4d03-8891-c1bdf462fa7b-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-hk68t\" (UID: \"fd61a6d8-4264-4d03-8891-c1bdf462fa7b\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-hk68t" Oct 03 14:03:47 crc kubenswrapper[4861]: I1003 14:03:47.790558 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fd61a6d8-4264-4d03-8891-c1bdf462fa7b-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-hk68t\" (UID: \"fd61a6d8-4264-4d03-8891-c1bdf462fa7b\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-hk68t" Oct 03 14:03:47 crc kubenswrapper[4861]: I1003 14:03:47.790725 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z7zv4\" (UniqueName: \"kubernetes.io/projected/fd61a6d8-4264-4d03-8891-c1bdf462fa7b-kube-api-access-z7zv4\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-hk68t\" (UID: \"fd61a6d8-4264-4d03-8891-c1bdf462fa7b\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-hk68t" Oct 03 14:03:47 crc kubenswrapper[4861]: I1003 14:03:47.892080 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fd61a6d8-4264-4d03-8891-c1bdf462fa7b-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-hk68t\" (UID: \"fd61a6d8-4264-4d03-8891-c1bdf462fa7b\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-hk68t" Oct 03 14:03:47 crc kubenswrapper[4861]: I1003 14:03:47.892216 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z7zv4\" (UniqueName: \"kubernetes.io/projected/fd61a6d8-4264-4d03-8891-c1bdf462fa7b-kube-api-access-z7zv4\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-hk68t\" (UID: \"fd61a6d8-4264-4d03-8891-c1bdf462fa7b\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-hk68t" Oct 03 14:03:47 crc kubenswrapper[4861]: I1003 14:03:47.892301 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fd61a6d8-4264-4d03-8891-c1bdf462fa7b-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-hk68t\" (UID: 
\"fd61a6d8-4264-4d03-8891-c1bdf462fa7b\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-hk68t" Oct 03 14:03:47 crc kubenswrapper[4861]: I1003 14:03:47.896413 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fd61a6d8-4264-4d03-8891-c1bdf462fa7b-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-hk68t\" (UID: \"fd61a6d8-4264-4d03-8891-c1bdf462fa7b\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-hk68t" Oct 03 14:03:47 crc kubenswrapper[4861]: I1003 14:03:47.897022 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fd61a6d8-4264-4d03-8891-c1bdf462fa7b-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-hk68t\" (UID: \"fd61a6d8-4264-4d03-8891-c1bdf462fa7b\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-hk68t" Oct 03 14:03:47 crc kubenswrapper[4861]: I1003 14:03:47.906644 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z7zv4\" (UniqueName: \"kubernetes.io/projected/fd61a6d8-4264-4d03-8891-c1bdf462fa7b-kube-api-access-z7zv4\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-hk68t\" (UID: \"fd61a6d8-4264-4d03-8891-c1bdf462fa7b\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-hk68t" Oct 03 14:03:48 crc kubenswrapper[4861]: I1003 14:03:48.100771 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-hk68t" Oct 03 14:03:48 crc kubenswrapper[4861]: I1003 14:03:48.725414 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-hk68t"] Oct 03 14:03:49 crc kubenswrapper[4861]: I1003 14:03:49.686860 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-hk68t" event={"ID":"fd61a6d8-4264-4d03-8891-c1bdf462fa7b","Type":"ContainerStarted","Data":"869119821ed8eabaa928ca855eb234159ae3db3875f343629daa2cde2e6fbdbc"} Oct 03 14:03:49 crc kubenswrapper[4861]: I1003 14:03:49.687207 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-hk68t" event={"ID":"fd61a6d8-4264-4d03-8891-c1bdf462fa7b","Type":"ContainerStarted","Data":"a14c792e41cd20e274e6b50ae7a02b30ba3173b960f402f5dec2e608a89373fd"} Oct 03 14:03:49 crc kubenswrapper[4861]: I1003 14:03:49.712212 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-hk68t" podStartSLOduration=2.560061029 podStartE2EDuration="2.712192352s" podCreationTimestamp="2025-10-03 14:03:47 +0000 UTC" firstStartedPulling="2025-10-03 14:03:48.746683468 +0000 UTC m=+1942.744668515" lastFinishedPulling="2025-10-03 14:03:48.898814791 +0000 UTC m=+1942.896799838" observedRunningTime="2025-10-03 14:03:49.7065106 +0000 UTC m=+1943.704495657" watchObservedRunningTime="2025-10-03 14:03:49.712192352 +0000 UTC m=+1943.710177399" Oct 03 14:03:58 crc kubenswrapper[4861]: I1003 14:03:58.220166 4861 scope.go:117] "RemoveContainer" containerID="b52a454d127d124d10680549c4755bea60436c80787cda5e54c997dd4d6f67e5" Oct 03 14:03:58 crc kubenswrapper[4861]: I1003 14:03:58.278735 4861 scope.go:117] "RemoveContainer" containerID="7aed167b995159e78a5930e172770220f5bac8d469b0f6e2586d96846bfc6b93" Oct 03 14:03:58 crc kubenswrapper[4861]: I1003 14:03:58.335994 4861 scope.go:117] 
"RemoveContainer" containerID="9daf7e884343bfddc03defa9cfeb515f90dae102ec52f788ed18250bb430cb2a" Oct 03 14:04:00 crc kubenswrapper[4861]: I1003 14:04:00.145007 4861 patch_prober.go:28] interesting pod/machine-config-daemon-t9slw container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 14:04:00 crc kubenswrapper[4861]: I1003 14:04:00.145547 4861 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 14:04:10 crc kubenswrapper[4861]: I1003 14:04:10.064160 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-867t8"] Oct 03 14:04:10 crc kubenswrapper[4861]: I1003 14:04:10.080822 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-867t8"] Oct 03 14:04:10 crc kubenswrapper[4861]: I1003 14:04:10.693135 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d7c9c017-a9df-4899-81ec-1fc7181f2414" path="/var/lib/kubelet/pods/d7c9c017-a9df-4899-81ec-1fc7181f2414/volumes" Oct 03 14:04:30 crc kubenswrapper[4861]: I1003 14:04:30.145218 4861 patch_prober.go:28] interesting pod/machine-config-daemon-t9slw container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 14:04:30 crc kubenswrapper[4861]: I1003 14:04:30.145900 4861 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 14:04:32 crc kubenswrapper[4861]: I1003 14:04:32.111905 4861 generic.go:334] "Generic (PLEG): container finished" podID="fd61a6d8-4264-4d03-8891-c1bdf462fa7b" containerID="869119821ed8eabaa928ca855eb234159ae3db3875f343629daa2cde2e6fbdbc" exitCode=0 Oct 03 14:04:32 crc kubenswrapper[4861]: I1003 14:04:32.112142 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-hk68t" event={"ID":"fd61a6d8-4264-4d03-8891-c1bdf462fa7b","Type":"ContainerDied","Data":"869119821ed8eabaa928ca855eb234159ae3db3875f343629daa2cde2e6fbdbc"} Oct 03 14:04:33 crc kubenswrapper[4861]: I1003 14:04:33.586422 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-hk68t" Oct 03 14:04:33 crc kubenswrapper[4861]: I1003 14:04:33.668641 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z7zv4\" (UniqueName: \"kubernetes.io/projected/fd61a6d8-4264-4d03-8891-c1bdf462fa7b-kube-api-access-z7zv4\") pod \"fd61a6d8-4264-4d03-8891-c1bdf462fa7b\" (UID: \"fd61a6d8-4264-4d03-8891-c1bdf462fa7b\") " Oct 03 14:04:33 crc kubenswrapper[4861]: I1003 14:04:33.668725 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fd61a6d8-4264-4d03-8891-c1bdf462fa7b-ssh-key\") pod \"fd61a6d8-4264-4d03-8891-c1bdf462fa7b\" (UID: \"fd61a6d8-4264-4d03-8891-c1bdf462fa7b\") " Oct 03 14:04:33 crc kubenswrapper[4861]: I1003 14:04:33.668754 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fd61a6d8-4264-4d03-8891-c1bdf462fa7b-inventory\") pod \"fd61a6d8-4264-4d03-8891-c1bdf462fa7b\" (UID: \"fd61a6d8-4264-4d03-8891-c1bdf462fa7b\") " Oct 03 14:04:33 crc kubenswrapper[4861]: I1003 14:04:33.680433 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fd61a6d8-4264-4d03-8891-c1bdf462fa7b-kube-api-access-z7zv4" (OuterVolumeSpecName: "kube-api-access-z7zv4") pod "fd61a6d8-4264-4d03-8891-c1bdf462fa7b" (UID: "fd61a6d8-4264-4d03-8891-c1bdf462fa7b"). InnerVolumeSpecName "kube-api-access-z7zv4". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 14:04:33 crc kubenswrapper[4861]: I1003 14:04:33.713213 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fd61a6d8-4264-4d03-8891-c1bdf462fa7b-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "fd61a6d8-4264-4d03-8891-c1bdf462fa7b" (UID: "fd61a6d8-4264-4d03-8891-c1bdf462fa7b"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 14:04:33 crc kubenswrapper[4861]: I1003 14:04:33.726396 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fd61a6d8-4264-4d03-8891-c1bdf462fa7b-inventory" (OuterVolumeSpecName: "inventory") pod "fd61a6d8-4264-4d03-8891-c1bdf462fa7b" (UID: "fd61a6d8-4264-4d03-8891-c1bdf462fa7b"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 14:04:33 crc kubenswrapper[4861]: I1003 14:04:33.771717 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z7zv4\" (UniqueName: \"kubernetes.io/projected/fd61a6d8-4264-4d03-8891-c1bdf462fa7b-kube-api-access-z7zv4\") on node \"crc\" DevicePath \"\"" Oct 03 14:04:33 crc kubenswrapper[4861]: I1003 14:04:33.771765 4861 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fd61a6d8-4264-4d03-8891-c1bdf462fa7b-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 03 14:04:33 crc kubenswrapper[4861]: I1003 14:04:33.771784 4861 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fd61a6d8-4264-4d03-8891-c1bdf462fa7b-inventory\") on node \"crc\" DevicePath \"\"" Oct 03 14:04:34 crc kubenswrapper[4861]: I1003 14:04:34.131607 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-hk68t" event={"ID":"fd61a6d8-4264-4d03-8891-c1bdf462fa7b","Type":"ContainerDied","Data":"a14c792e41cd20e274e6b50ae7a02b30ba3173b960f402f5dec2e608a89373fd"} Oct 03 14:04:34 crc kubenswrapper[4861]: I1003 14:04:34.131644 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-hk68t" Oct 03 14:04:34 crc kubenswrapper[4861]: I1003 14:04:34.131653 4861 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a14c792e41cd20e274e6b50ae7a02b30ba3173b960f402f5dec2e608a89373fd" Oct 03 14:04:34 crc kubenswrapper[4861]: I1003 14:04:34.248001 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-528b8"] Oct 03 14:04:34 crc kubenswrapper[4861]: E1003 14:04:34.248363 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd61a6d8-4264-4d03-8891-c1bdf462fa7b" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Oct 03 14:04:34 crc kubenswrapper[4861]: I1003 14:04:34.248380 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd61a6d8-4264-4d03-8891-c1bdf462fa7b" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Oct 03 14:04:34 crc kubenswrapper[4861]: I1003 14:04:34.248571 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="fd61a6d8-4264-4d03-8891-c1bdf462fa7b" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Oct 03 14:04:34 crc kubenswrapper[4861]: I1003 14:04:34.249166 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-528b8" Oct 03 14:04:34 crc kubenswrapper[4861]: I1003 14:04:34.251805 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 03 14:04:34 crc kubenswrapper[4861]: I1003 14:04:34.256686 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-bd7xg" Oct 03 14:04:34 crc kubenswrapper[4861]: I1003 14:04:34.256956 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 03 14:04:34 crc kubenswrapper[4861]: I1003 14:04:34.265686 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 03 14:04:34 crc kubenswrapper[4861]: I1003 14:04:34.271386 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-528b8"] Oct 03 14:04:34 crc kubenswrapper[4861]: I1003 14:04:34.381660 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hw2h5\" (UniqueName: \"kubernetes.io/projected/2b2349a3-d6ca-4e6f-a564-03dac17e4746-kube-api-access-hw2h5\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-528b8\" (UID: \"2b2349a3-d6ca-4e6f-a564-03dac17e4746\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-528b8" Oct 03 14:04:34 crc kubenswrapper[4861]: I1003 14:04:34.381727 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2b2349a3-d6ca-4e6f-a564-03dac17e4746-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-528b8\" (UID: \"2b2349a3-d6ca-4e6f-a564-03dac17e4746\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-528b8" Oct 03 14:04:34 crc kubenswrapper[4861]: I1003 14:04:34.381872 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2b2349a3-d6ca-4e6f-a564-03dac17e4746-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-528b8\" (UID: \"2b2349a3-d6ca-4e6f-a564-03dac17e4746\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-528b8" Oct 03 14:04:34 crc kubenswrapper[4861]: I1003 14:04:34.483032 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hw2h5\" (UniqueName: \"kubernetes.io/projected/2b2349a3-d6ca-4e6f-a564-03dac17e4746-kube-api-access-hw2h5\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-528b8\" (UID: \"2b2349a3-d6ca-4e6f-a564-03dac17e4746\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-528b8" Oct 03 14:04:34 crc kubenswrapper[4861]: I1003 14:04:34.483080 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2b2349a3-d6ca-4e6f-a564-03dac17e4746-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-528b8\" (UID: \"2b2349a3-d6ca-4e6f-a564-03dac17e4746\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-528b8" Oct 03 14:04:34 crc kubenswrapper[4861]: I1003 14:04:34.483183 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2b2349a3-d6ca-4e6f-a564-03dac17e4746-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-528b8\" 
(UID: \"2b2349a3-d6ca-4e6f-a564-03dac17e4746\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-528b8" Oct 03 14:04:34 crc kubenswrapper[4861]: I1003 14:04:34.487136 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2b2349a3-d6ca-4e6f-a564-03dac17e4746-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-528b8\" (UID: \"2b2349a3-d6ca-4e6f-a564-03dac17e4746\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-528b8" Oct 03 14:04:34 crc kubenswrapper[4861]: I1003 14:04:34.489648 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2b2349a3-d6ca-4e6f-a564-03dac17e4746-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-528b8\" (UID: \"2b2349a3-d6ca-4e6f-a564-03dac17e4746\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-528b8" Oct 03 14:04:34 crc kubenswrapper[4861]: I1003 14:04:34.505203 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hw2h5\" (UniqueName: \"kubernetes.io/projected/2b2349a3-d6ca-4e6f-a564-03dac17e4746-kube-api-access-hw2h5\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-528b8\" (UID: \"2b2349a3-d6ca-4e6f-a564-03dac17e4746\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-528b8" Oct 03 14:04:34 crc kubenswrapper[4861]: I1003 14:04:34.567283 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-528b8" Oct 03 14:04:35 crc kubenswrapper[4861]: I1003 14:04:35.114994 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-528b8"] Oct 03 14:04:35 crc kubenswrapper[4861]: I1003 14:04:35.147007 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-528b8" event={"ID":"2b2349a3-d6ca-4e6f-a564-03dac17e4746","Type":"ContainerStarted","Data":"32e759a90a69b9cc454e2c288832c3ead1968de185381b57b4974145c1cf9adf"} Oct 03 14:04:36 crc kubenswrapper[4861]: I1003 14:04:36.154191 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-528b8" event={"ID":"2b2349a3-d6ca-4e6f-a564-03dac17e4746","Type":"ContainerStarted","Data":"38af5793b484cd447a5be592976c5f5c047cdbcf578335c095baff7b54b39577"} Oct 03 14:04:36 crc kubenswrapper[4861]: I1003 14:04:36.180627 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-528b8" podStartSLOduration=1.979397011 podStartE2EDuration="2.180600826s" podCreationTimestamp="2025-10-03 14:04:34 +0000 UTC" firstStartedPulling="2025-10-03 14:04:35.114921133 +0000 UTC m=+1989.112906180" lastFinishedPulling="2025-10-03 14:04:35.316124948 +0000 UTC m=+1989.314109995" observedRunningTime="2025-10-03 14:04:36.168350299 +0000 UTC m=+1990.166335416" watchObservedRunningTime="2025-10-03 14:04:36.180600826 +0000 UTC m=+1990.178585903" Oct 03 14:04:58 crc kubenswrapper[4861]: I1003 14:04:58.451853 4861 scope.go:117] "RemoveContainer" containerID="8ce9f374c14ab77ff7df53e963f5059ca15b3e8d1d02dbbbb6b06f2df3fbac1e" Oct 03 14:05:00 crc kubenswrapper[4861]: I1003 14:05:00.145474 4861 patch_prober.go:28] interesting pod/machine-config-daemon-t9slw container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe 
status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 14:05:00 crc kubenswrapper[4861]: I1003 14:05:00.145924 4861 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 14:05:00 crc kubenswrapper[4861]: I1003 14:05:00.145971 4861 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" Oct 03 14:05:00 crc kubenswrapper[4861]: I1003 14:05:00.146698 4861 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"f064e66ff80057718d5d7200a474f2f2e7de2018d0db330fa44860780399accf"} pod="openshift-machine-config-operator/machine-config-daemon-t9slw" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 03 14:05:00 crc kubenswrapper[4861]: I1003 14:05:00.146755 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" containerName="machine-config-daemon" containerID="cri-o://f064e66ff80057718d5d7200a474f2f2e7de2018d0db330fa44860780399accf" gracePeriod=600 Oct 03 14:05:00 crc kubenswrapper[4861]: I1003 14:05:00.359454 4861 generic.go:334] "Generic (PLEG): container finished" podID="d8335d3f-417e-4114-b306-a3d8f6c31348" containerID="f064e66ff80057718d5d7200a474f2f2e7de2018d0db330fa44860780399accf" exitCode=0 Oct 03 14:05:00 crc kubenswrapper[4861]: I1003 14:05:00.359516 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" event={"ID":"d8335d3f-417e-4114-b306-a3d8f6c31348","Type":"ContainerDied","Data":"f064e66ff80057718d5d7200a474f2f2e7de2018d0db330fa44860780399accf"} Oct 03 14:05:00 crc kubenswrapper[4861]: I1003 14:05:00.359785 4861 scope.go:117] "RemoveContainer" containerID="c1e256e5753a41524e35adb39d95dd19fa1e46e14c50fc9dfd4aef05090aab1c" Oct 03 14:05:01 crc kubenswrapper[4861]: I1003 14:05:01.371379 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" event={"ID":"d8335d3f-417e-4114-b306-a3d8f6c31348","Type":"ContainerStarted","Data":"1cdb234382b9308b9a1f3635af42d3c4e786f277d88eba9e4b15d46ab010519c"} Oct 03 14:05:35 crc kubenswrapper[4861]: I1003 14:05:35.669389 4861 generic.go:334] "Generic (PLEG): container finished" podID="2b2349a3-d6ca-4e6f-a564-03dac17e4746" containerID="38af5793b484cd447a5be592976c5f5c047cdbcf578335c095baff7b54b39577" exitCode=2 Oct 03 14:05:35 crc kubenswrapper[4861]: I1003 14:05:35.669452 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-528b8" event={"ID":"2b2349a3-d6ca-4e6f-a564-03dac17e4746","Type":"ContainerDied","Data":"38af5793b484cd447a5be592976c5f5c047cdbcf578335c095baff7b54b39577"} Oct 03 14:05:37 crc kubenswrapper[4861]: I1003 14:05:37.091630 4861 util.go:48] "No ready sandbox for pod can be found. 
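
Note: the 14:05:00 sequence above is the standard kubelet liveness flow: the probe GET to http://127.0.0.1:8798/health is refused, the probe is reported failed, and the container is killed (here with gracePeriod=600) and restarted under a fresh container ID. Below is a minimal sketch of an equivalent HTTP liveness probe using the client-go API types; the path and port are taken from the log line, while the period and failure threshold are illustrative assumptions, not the machine-config-daemon's actual manifest.

    package sketch

    import (
        corev1 "k8s.io/api/core/v1"
        "k8s.io/apimachinery/pkg/util/intstr"
    )

    // Probe shape matching the log: kubelet GETs the path on the pod's
    // loopback address and restarts the container after repeated failures.
    // PeriodSeconds and FailureThreshold are assumed values.
    func livenessProbe() *corev1.Probe {
        return &corev1.Probe{
            ProbeHandler: corev1.ProbeHandler{
                HTTPGet: &corev1.HTTPGetAction{
                    Path: "/health",
                    Port: intstr.FromInt(8798),
                },
            },
            PeriodSeconds:    10,
            FailureThreshold: 3,
        }
    }
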
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-528b8" Oct 03 14:05:37 crc kubenswrapper[4861]: I1003 14:05:37.159537 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hw2h5\" (UniqueName: \"kubernetes.io/projected/2b2349a3-d6ca-4e6f-a564-03dac17e4746-kube-api-access-hw2h5\") pod \"2b2349a3-d6ca-4e6f-a564-03dac17e4746\" (UID: \"2b2349a3-d6ca-4e6f-a564-03dac17e4746\") " Oct 03 14:05:37 crc kubenswrapper[4861]: I1003 14:05:37.159720 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2b2349a3-d6ca-4e6f-a564-03dac17e4746-inventory\") pod \"2b2349a3-d6ca-4e6f-a564-03dac17e4746\" (UID: \"2b2349a3-d6ca-4e6f-a564-03dac17e4746\") " Oct 03 14:05:37 crc kubenswrapper[4861]: I1003 14:05:37.159756 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2b2349a3-d6ca-4e6f-a564-03dac17e4746-ssh-key\") pod \"2b2349a3-d6ca-4e6f-a564-03dac17e4746\" (UID: \"2b2349a3-d6ca-4e6f-a564-03dac17e4746\") " Oct 03 14:05:37 crc kubenswrapper[4861]: I1003 14:05:37.166710 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2b2349a3-d6ca-4e6f-a564-03dac17e4746-kube-api-access-hw2h5" (OuterVolumeSpecName: "kube-api-access-hw2h5") pod "2b2349a3-d6ca-4e6f-a564-03dac17e4746" (UID: "2b2349a3-d6ca-4e6f-a564-03dac17e4746"). InnerVolumeSpecName "kube-api-access-hw2h5". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 14:05:37 crc kubenswrapper[4861]: I1003 14:05:37.191186 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2b2349a3-d6ca-4e6f-a564-03dac17e4746-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "2b2349a3-d6ca-4e6f-a564-03dac17e4746" (UID: "2b2349a3-d6ca-4e6f-a564-03dac17e4746"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 14:05:37 crc kubenswrapper[4861]: I1003 14:05:37.213358 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2b2349a3-d6ca-4e6f-a564-03dac17e4746-inventory" (OuterVolumeSpecName: "inventory") pod "2b2349a3-d6ca-4e6f-a564-03dac17e4746" (UID: "2b2349a3-d6ca-4e6f-a564-03dac17e4746"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 14:05:37 crc kubenswrapper[4861]: I1003 14:05:37.261988 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hw2h5\" (UniqueName: \"kubernetes.io/projected/2b2349a3-d6ca-4e6f-a564-03dac17e4746-kube-api-access-hw2h5\") on node \"crc\" DevicePath \"\"" Oct 03 14:05:37 crc kubenswrapper[4861]: I1003 14:05:37.262322 4861 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2b2349a3-d6ca-4e6f-a564-03dac17e4746-inventory\") on node \"crc\" DevicePath \"\"" Oct 03 14:05:37 crc kubenswrapper[4861]: I1003 14:05:37.262342 4861 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2b2349a3-d6ca-4e6f-a564-03dac17e4746-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 03 14:05:37 crc kubenswrapper[4861]: I1003 14:05:37.694297 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-528b8" event={"ID":"2b2349a3-d6ca-4e6f-a564-03dac17e4746","Type":"ContainerDied","Data":"32e759a90a69b9cc454e2c288832c3ead1968de185381b57b4974145c1cf9adf"} Oct 03 14:05:37 crc kubenswrapper[4861]: I1003 14:05:37.694532 4861 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="32e759a90a69b9cc454e2c288832c3ead1968de185381b57b4974145c1cf9adf" Oct 03 14:05:37 crc kubenswrapper[4861]: I1003 14:05:37.694672 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-528b8" Oct 03 14:05:45 crc kubenswrapper[4861]: I1003 14:05:45.044101 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-smz4r"] Oct 03 14:05:45 crc kubenswrapper[4861]: E1003 14:05:45.047140 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2b2349a3-d6ca-4e6f-a564-03dac17e4746" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Oct 03 14:05:45 crc kubenswrapper[4861]: I1003 14:05:45.047393 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="2b2349a3-d6ca-4e6f-a564-03dac17e4746" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Oct 03 14:05:45 crc kubenswrapper[4861]: I1003 14:05:45.047789 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="2b2349a3-d6ca-4e6f-a564-03dac17e4746" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Oct 03 14:05:45 crc kubenswrapper[4861]: I1003 14:05:45.048870 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-smz4r" Oct 03 14:05:45 crc kubenswrapper[4861]: I1003 14:05:45.052791 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 03 14:05:45 crc kubenswrapper[4861]: I1003 14:05:45.053474 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 03 14:05:45 crc kubenswrapper[4861]: I1003 14:05:45.053598 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 03 14:05:45 crc kubenswrapper[4861]: I1003 14:05:45.054384 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-bd7xg" Oct 03 14:05:45 crc kubenswrapper[4861]: I1003 14:05:45.078745 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-smz4r"] Oct 03 14:05:45 crc kubenswrapper[4861]: I1003 14:05:45.121272 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f353c9e7-1d8f-4084-b475-0c725858f034-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-smz4r\" (UID: \"f353c9e7-1d8f-4084-b475-0c725858f034\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-smz4r" Oct 03 14:05:45 crc kubenswrapper[4861]: I1003 14:05:45.121391 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f353c9e7-1d8f-4084-b475-0c725858f034-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-smz4r\" (UID: \"f353c9e7-1d8f-4084-b475-0c725858f034\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-smz4r" Oct 03 14:05:45 crc kubenswrapper[4861]: I1003 14:05:45.121755 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hnpsn\" (UniqueName: \"kubernetes.io/projected/f353c9e7-1d8f-4084-b475-0c725858f034-kube-api-access-hnpsn\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-smz4r\" (UID: \"f353c9e7-1d8f-4084-b475-0c725858f034\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-smz4r" Oct 03 14:05:45 crc kubenswrapper[4861]: I1003 14:05:45.224006 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hnpsn\" (UniqueName: \"kubernetes.io/projected/f353c9e7-1d8f-4084-b475-0c725858f034-kube-api-access-hnpsn\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-smz4r\" (UID: \"f353c9e7-1d8f-4084-b475-0c725858f034\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-smz4r" Oct 03 14:05:45 crc kubenswrapper[4861]: I1003 14:05:45.224130 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f353c9e7-1d8f-4084-b475-0c725858f034-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-smz4r\" (UID: \"f353c9e7-1d8f-4084-b475-0c725858f034\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-smz4r" Oct 03 14:05:45 crc kubenswrapper[4861]: I1003 14:05:45.224157 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f353c9e7-1d8f-4084-b475-0c725858f034-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-smz4r\" 
(UID: \"f353c9e7-1d8f-4084-b475-0c725858f034\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-smz4r" Oct 03 14:05:45 crc kubenswrapper[4861]: I1003 14:05:45.234381 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f353c9e7-1d8f-4084-b475-0c725858f034-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-smz4r\" (UID: \"f353c9e7-1d8f-4084-b475-0c725858f034\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-smz4r" Oct 03 14:05:45 crc kubenswrapper[4861]: I1003 14:05:45.235019 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f353c9e7-1d8f-4084-b475-0c725858f034-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-smz4r\" (UID: \"f353c9e7-1d8f-4084-b475-0c725858f034\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-smz4r" Oct 03 14:05:45 crc kubenswrapper[4861]: I1003 14:05:45.244492 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hnpsn\" (UniqueName: \"kubernetes.io/projected/f353c9e7-1d8f-4084-b475-0c725858f034-kube-api-access-hnpsn\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-smz4r\" (UID: \"f353c9e7-1d8f-4084-b475-0c725858f034\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-smz4r" Oct 03 14:05:45 crc kubenswrapper[4861]: I1003 14:05:45.383057 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-smz4r" Oct 03 14:05:45 crc kubenswrapper[4861]: I1003 14:05:45.927900 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-smz4r"] Oct 03 14:05:45 crc kubenswrapper[4861]: I1003 14:05:45.945711 4861 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 03 14:05:46 crc kubenswrapper[4861]: I1003 14:05:46.595029 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-b7r7k"] Oct 03 14:05:46 crc kubenswrapper[4861]: I1003 14:05:46.597692 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-b7r7k" Oct 03 14:05:46 crc kubenswrapper[4861]: I1003 14:05:46.607250 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-b7r7k"] Oct 03 14:05:46 crc kubenswrapper[4861]: I1003 14:05:46.666901 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jrx88\" (UniqueName: \"kubernetes.io/projected/474c1fa4-e5b7-43cf-86d3-1acdca0cbbd9-kube-api-access-jrx88\") pod \"redhat-operators-b7r7k\" (UID: \"474c1fa4-e5b7-43cf-86d3-1acdca0cbbd9\") " pod="openshift-marketplace/redhat-operators-b7r7k" Oct 03 14:05:46 crc kubenswrapper[4861]: I1003 14:05:46.666954 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/474c1fa4-e5b7-43cf-86d3-1acdca0cbbd9-catalog-content\") pod \"redhat-operators-b7r7k\" (UID: \"474c1fa4-e5b7-43cf-86d3-1acdca0cbbd9\") " pod="openshift-marketplace/redhat-operators-b7r7k" Oct 03 14:05:46 crc kubenswrapper[4861]: I1003 14:05:46.667290 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/474c1fa4-e5b7-43cf-86d3-1acdca0cbbd9-utilities\") pod \"redhat-operators-b7r7k\" (UID: \"474c1fa4-e5b7-43cf-86d3-1acdca0cbbd9\") " pod="openshift-marketplace/redhat-operators-b7r7k" Oct 03 14:05:46 crc kubenswrapper[4861]: I1003 14:05:46.769682 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/474c1fa4-e5b7-43cf-86d3-1acdca0cbbd9-catalog-content\") pod \"redhat-operators-b7r7k\" (UID: \"474c1fa4-e5b7-43cf-86d3-1acdca0cbbd9\") " pod="openshift-marketplace/redhat-operators-b7r7k" Oct 03 14:05:46 crc kubenswrapper[4861]: I1003 14:05:46.769853 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/474c1fa4-e5b7-43cf-86d3-1acdca0cbbd9-utilities\") pod \"redhat-operators-b7r7k\" (UID: \"474c1fa4-e5b7-43cf-86d3-1acdca0cbbd9\") " pod="openshift-marketplace/redhat-operators-b7r7k" Oct 03 14:05:46 crc kubenswrapper[4861]: I1003 14:05:46.770499 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/474c1fa4-e5b7-43cf-86d3-1acdca0cbbd9-catalog-content\") pod \"redhat-operators-b7r7k\" (UID: \"474c1fa4-e5b7-43cf-86d3-1acdca0cbbd9\") " pod="openshift-marketplace/redhat-operators-b7r7k" Oct 03 14:05:46 crc kubenswrapper[4861]: I1003 14:05:46.770692 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/474c1fa4-e5b7-43cf-86d3-1acdca0cbbd9-utilities\") pod \"redhat-operators-b7r7k\" (UID: \"474c1fa4-e5b7-43cf-86d3-1acdca0cbbd9\") " pod="openshift-marketplace/redhat-operators-b7r7k" Oct 03 14:05:46 crc kubenswrapper[4861]: I1003 14:05:46.770788 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jrx88\" (UniqueName: \"kubernetes.io/projected/474c1fa4-e5b7-43cf-86d3-1acdca0cbbd9-kube-api-access-jrx88\") pod \"redhat-operators-b7r7k\" (UID: \"474c1fa4-e5b7-43cf-86d3-1acdca0cbbd9\") " pod="openshift-marketplace/redhat-operators-b7r7k" Oct 03 14:05:46 crc kubenswrapper[4861]: I1003 14:05:46.772903 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-smz4r" event={"ID":"f353c9e7-1d8f-4084-b475-0c725858f034","Type":"ContainerStarted","Data":"cd4078b3bfa84b79ccf55a3f58e97235d6414aac7991a4d88ce560c2cf804ac5"} Oct 03 14:05:46 crc kubenswrapper[4861]: I1003 14:05:46.772951 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-smz4r" event={"ID":"f353c9e7-1d8f-4084-b475-0c725858f034","Type":"ContainerStarted","Data":"f0de9bb061f052e65f07f866befb0a89ea279fda991cf1a5f2d5d8b6104f0d32"} Oct 03 14:05:46 crc kubenswrapper[4861]: I1003 14:05:46.790759 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jrx88\" (UniqueName: \"kubernetes.io/projected/474c1fa4-e5b7-43cf-86d3-1acdca0cbbd9-kube-api-access-jrx88\") pod \"redhat-operators-b7r7k\" (UID: \"474c1fa4-e5b7-43cf-86d3-1acdca0cbbd9\") " pod="openshift-marketplace/redhat-operators-b7r7k" Oct 03 14:05:46 crc kubenswrapper[4861]: I1003 14:05:46.925047 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-b7r7k" Oct 03 14:05:47 crc kubenswrapper[4861]: I1003 14:05:47.183843 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-smz4r" podStartSLOduration=1.9866994610000002 podStartE2EDuration="2.183827084s" podCreationTimestamp="2025-10-03 14:05:45 +0000 UTC" firstStartedPulling="2025-10-03 14:05:45.945387701 +0000 UTC m=+2059.943372748" lastFinishedPulling="2025-10-03 14:05:46.142515324 +0000 UTC m=+2060.140500371" observedRunningTime="2025-10-03 14:05:46.810786712 +0000 UTC m=+2060.808771779" watchObservedRunningTime="2025-10-03 14:05:47.183827084 +0000 UTC m=+2061.181812131" Oct 03 14:05:47 crc kubenswrapper[4861]: I1003 14:05:47.186909 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-b7r7k"] Oct 03 14:05:47 crc kubenswrapper[4861]: I1003 14:05:47.782848 4861 generic.go:334] "Generic (PLEG): container finished" podID="474c1fa4-e5b7-43cf-86d3-1acdca0cbbd9" containerID="eab9609f8e23f755556422969c4da9617d5d3f3f10c4e61ef3b5af4d90f9a4d0" exitCode=0 Oct 03 14:05:47 crc kubenswrapper[4861]: I1003 14:05:47.783013 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b7r7k" event={"ID":"474c1fa4-e5b7-43cf-86d3-1acdca0cbbd9","Type":"ContainerDied","Data":"eab9609f8e23f755556422969c4da9617d5d3f3f10c4e61ef3b5af4d90f9a4d0"} Oct 03 14:05:47 crc kubenswrapper[4861]: I1003 14:05:47.784170 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b7r7k" event={"ID":"474c1fa4-e5b7-43cf-86d3-1acdca0cbbd9","Type":"ContainerStarted","Data":"294571779d541da525fb636c92c79937f90ed2a8c5617ffff4f656fac19239be"} Oct 03 14:05:48 crc kubenswrapper[4861]: I1003 14:05:48.795045 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b7r7k" event={"ID":"474c1fa4-e5b7-43cf-86d3-1acdca0cbbd9","Type":"ContainerStarted","Data":"df569cb94f0a466612dfff6f680a442332cdc7b10eebc78b538ec2bcaa4da7e1"} Oct 03 14:05:52 crc kubenswrapper[4861]: I1003 14:05:52.107257 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-q8n9x"] Oct 03 14:05:52 crc kubenswrapper[4861]: I1003 14:05:52.110471 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-q8n9x" Oct 03 14:05:52 crc kubenswrapper[4861]: I1003 14:05:52.166499 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-q8n9x"] Oct 03 14:05:52 crc kubenswrapper[4861]: I1003 14:05:52.171078 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s65k7\" (UniqueName: \"kubernetes.io/projected/ebb2b376-feb0-4cac-b6c1-6f4ce47b4356-kube-api-access-s65k7\") pod \"community-operators-q8n9x\" (UID: \"ebb2b376-feb0-4cac-b6c1-6f4ce47b4356\") " pod="openshift-marketplace/community-operators-q8n9x" Oct 03 14:05:52 crc kubenswrapper[4861]: I1003 14:05:52.171216 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ebb2b376-feb0-4cac-b6c1-6f4ce47b4356-utilities\") pod \"community-operators-q8n9x\" (UID: \"ebb2b376-feb0-4cac-b6c1-6f4ce47b4356\") " pod="openshift-marketplace/community-operators-q8n9x" Oct 03 14:05:52 crc kubenswrapper[4861]: I1003 14:05:52.171343 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ebb2b376-feb0-4cac-b6c1-6f4ce47b4356-catalog-content\") pod \"community-operators-q8n9x\" (UID: \"ebb2b376-feb0-4cac-b6c1-6f4ce47b4356\") " pod="openshift-marketplace/community-operators-q8n9x" Oct 03 14:05:52 crc kubenswrapper[4861]: I1003 14:05:52.273634 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ebb2b376-feb0-4cac-b6c1-6f4ce47b4356-catalog-content\") pod \"community-operators-q8n9x\" (UID: \"ebb2b376-feb0-4cac-b6c1-6f4ce47b4356\") " pod="openshift-marketplace/community-operators-q8n9x" Oct 03 14:05:52 crc kubenswrapper[4861]: I1003 14:05:52.273751 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s65k7\" (UniqueName: \"kubernetes.io/projected/ebb2b376-feb0-4cac-b6c1-6f4ce47b4356-kube-api-access-s65k7\") pod \"community-operators-q8n9x\" (UID: \"ebb2b376-feb0-4cac-b6c1-6f4ce47b4356\") " pod="openshift-marketplace/community-operators-q8n9x" Oct 03 14:05:52 crc kubenswrapper[4861]: I1003 14:05:52.273801 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ebb2b376-feb0-4cac-b6c1-6f4ce47b4356-utilities\") pod \"community-operators-q8n9x\" (UID: \"ebb2b376-feb0-4cac-b6c1-6f4ce47b4356\") " pod="openshift-marketplace/community-operators-q8n9x" Oct 03 14:05:52 crc kubenswrapper[4861]: I1003 14:05:52.274260 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ebb2b376-feb0-4cac-b6c1-6f4ce47b4356-utilities\") pod \"community-operators-q8n9x\" (UID: \"ebb2b376-feb0-4cac-b6c1-6f4ce47b4356\") " pod="openshift-marketplace/community-operators-q8n9x" Oct 03 14:05:52 crc kubenswrapper[4861]: I1003 14:05:52.274308 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ebb2b376-feb0-4cac-b6c1-6f4ce47b4356-catalog-content\") pod \"community-operators-q8n9x\" (UID: \"ebb2b376-feb0-4cac-b6c1-6f4ce47b4356\") " pod="openshift-marketplace/community-operators-q8n9x" Oct 03 14:05:52 crc kubenswrapper[4861]: I1003 14:05:52.305252 4861 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-s65k7\" (UniqueName: \"kubernetes.io/projected/ebb2b376-feb0-4cac-b6c1-6f4ce47b4356-kube-api-access-s65k7\") pod \"community-operators-q8n9x\" (UID: \"ebb2b376-feb0-4cac-b6c1-6f4ce47b4356\") " pod="openshift-marketplace/community-operators-q8n9x" Oct 03 14:05:52 crc kubenswrapper[4861]: I1003 14:05:52.427747 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-q8n9x" Oct 03 14:05:52 crc kubenswrapper[4861]: I1003 14:05:52.981693 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-q8n9x"] Oct 03 14:05:53 crc kubenswrapper[4861]: I1003 14:05:53.846166 4861 generic.go:334] "Generic (PLEG): container finished" podID="ebb2b376-feb0-4cac-b6c1-6f4ce47b4356" containerID="d666ea40953db836536fa49ea6f0cb3d61e3ef191de08383200321240580d119" exitCode=0 Oct 03 14:05:53 crc kubenswrapper[4861]: I1003 14:05:53.846457 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-q8n9x" event={"ID":"ebb2b376-feb0-4cac-b6c1-6f4ce47b4356","Type":"ContainerDied","Data":"d666ea40953db836536fa49ea6f0cb3d61e3ef191de08383200321240580d119"} Oct 03 14:05:53 crc kubenswrapper[4861]: I1003 14:05:53.846484 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-q8n9x" event={"ID":"ebb2b376-feb0-4cac-b6c1-6f4ce47b4356","Type":"ContainerStarted","Data":"a21fcad4393bea069f0c0af8a4307c02167f3f9ec18aaecd1249e297c28c8673"} Oct 03 14:05:54 crc kubenswrapper[4861]: I1003 14:05:54.854550 4861 generic.go:334] "Generic (PLEG): container finished" podID="474c1fa4-e5b7-43cf-86d3-1acdca0cbbd9" containerID="df569cb94f0a466612dfff6f680a442332cdc7b10eebc78b538ec2bcaa4da7e1" exitCode=0 Oct 03 14:05:54 crc kubenswrapper[4861]: I1003 14:05:54.854888 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b7r7k" event={"ID":"474c1fa4-e5b7-43cf-86d3-1acdca0cbbd9","Type":"ContainerDied","Data":"df569cb94f0a466612dfff6f680a442332cdc7b10eebc78b538ec2bcaa4da7e1"} Oct 03 14:05:54 crc kubenswrapper[4861]: I1003 14:05:54.861028 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-q8n9x" event={"ID":"ebb2b376-feb0-4cac-b6c1-6f4ce47b4356","Type":"ContainerStarted","Data":"fa3943318224383bb6b0361f9501143968ae86386f3bc72d41eff41f68d24c2b"} Oct 03 14:05:55 crc kubenswrapper[4861]: I1003 14:05:55.873202 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b7r7k" event={"ID":"474c1fa4-e5b7-43cf-86d3-1acdca0cbbd9","Type":"ContainerStarted","Data":"3c950db61f958dbe81a7fa6056f9551f82d7f2346adc4783850b100d75bff776"} Oct 03 14:05:55 crc kubenswrapper[4861]: I1003 14:05:55.892629 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-b7r7k" podStartSLOduration=2.289083032 podStartE2EDuration="9.892611678s" podCreationTimestamp="2025-10-03 14:05:46 +0000 UTC" firstStartedPulling="2025-10-03 14:05:47.784990061 +0000 UTC m=+2061.782975108" lastFinishedPulling="2025-10-03 14:05:55.388518707 +0000 UTC m=+2069.386503754" observedRunningTime="2025-10-03 14:05:55.890791909 +0000 UTC m=+2069.888776976" watchObservedRunningTime="2025-10-03 14:05:55.892611678 +0000 UTC m=+2069.890596725" Oct 03 14:05:56 crc kubenswrapper[4861]: I1003 14:05:56.925577 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-marketplace/redhat-operators-b7r7k" Oct 03 14:05:56 crc kubenswrapper[4861]: I1003 14:05:56.925971 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-b7r7k" Oct 03 14:05:57 crc kubenswrapper[4861]: I1003 14:05:57.889759 4861 generic.go:334] "Generic (PLEG): container finished" podID="ebb2b376-feb0-4cac-b6c1-6f4ce47b4356" containerID="fa3943318224383bb6b0361f9501143968ae86386f3bc72d41eff41f68d24c2b" exitCode=0 Oct 03 14:05:57 crc kubenswrapper[4861]: I1003 14:05:57.890002 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-q8n9x" event={"ID":"ebb2b376-feb0-4cac-b6c1-6f4ce47b4356","Type":"ContainerDied","Data":"fa3943318224383bb6b0361f9501143968ae86386f3bc72d41eff41f68d24c2b"} Oct 03 14:05:58 crc kubenswrapper[4861]: I1003 14:05:58.086110 4861 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-b7r7k" podUID="474c1fa4-e5b7-43cf-86d3-1acdca0cbbd9" containerName="registry-server" probeResult="failure" output=< Oct 03 14:05:58 crc kubenswrapper[4861]: timeout: failed to connect service ":50051" within 1s Oct 03 14:05:58 crc kubenswrapper[4861]: > Oct 03 14:05:58 crc kubenswrapper[4861]: I1003 14:05:58.900597 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-q8n9x" event={"ID":"ebb2b376-feb0-4cac-b6c1-6f4ce47b4356","Type":"ContainerStarted","Data":"9bca9acab6f5e6196281affe932232b7caeafcdce9c4b9931e4e277d13e08db5"} Oct 03 14:05:58 crc kubenswrapper[4861]: I1003 14:05:58.930970 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-q8n9x" podStartSLOduration=2.490849959 podStartE2EDuration="6.930946096s" podCreationTimestamp="2025-10-03 14:05:52 +0000 UTC" firstStartedPulling="2025-10-03 14:05:53.84892278 +0000 UTC m=+2067.846907827" lastFinishedPulling="2025-10-03 14:05:58.289018917 +0000 UTC m=+2072.287003964" observedRunningTime="2025-10-03 14:05:58.920265062 +0000 UTC m=+2072.918250119" watchObservedRunningTime="2025-10-03 14:05:58.930946096 +0000 UTC m=+2072.928931143" Oct 03 14:06:02 crc kubenswrapper[4861]: I1003 14:06:02.428292 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-q8n9x" Oct 03 14:06:02 crc kubenswrapper[4861]: I1003 14:06:02.429397 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-q8n9x" Oct 03 14:06:02 crc kubenswrapper[4861]: I1003 14:06:02.480526 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-q8n9x" Oct 03 14:06:03 crc kubenswrapper[4861]: I1003 14:06:03.995386 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-q8n9x" Oct 03 14:06:04 crc kubenswrapper[4861]: I1003 14:06:04.058444 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-q8n9x"] Oct 03 14:06:05 crc kubenswrapper[4861]: I1003 14:06:05.956625 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-q8n9x" podUID="ebb2b376-feb0-4cac-b6c1-6f4ce47b4356" containerName="registry-server" containerID="cri-o://9bca9acab6f5e6196281affe932232b7caeafcdce9c4b9931e4e277d13e08db5" gracePeriod=2 Oct 03 14:06:06 crc kubenswrapper[4861]: I1003 
14:06:06.432582 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-q8n9x" Oct 03 14:06:06 crc kubenswrapper[4861]: I1003 14:06:06.544552 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s65k7\" (UniqueName: \"kubernetes.io/projected/ebb2b376-feb0-4cac-b6c1-6f4ce47b4356-kube-api-access-s65k7\") pod \"ebb2b376-feb0-4cac-b6c1-6f4ce47b4356\" (UID: \"ebb2b376-feb0-4cac-b6c1-6f4ce47b4356\") " Oct 03 14:06:06 crc kubenswrapper[4861]: I1003 14:06:06.544672 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ebb2b376-feb0-4cac-b6c1-6f4ce47b4356-utilities\") pod \"ebb2b376-feb0-4cac-b6c1-6f4ce47b4356\" (UID: \"ebb2b376-feb0-4cac-b6c1-6f4ce47b4356\") " Oct 03 14:06:06 crc kubenswrapper[4861]: I1003 14:06:06.544740 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ebb2b376-feb0-4cac-b6c1-6f4ce47b4356-catalog-content\") pod \"ebb2b376-feb0-4cac-b6c1-6f4ce47b4356\" (UID: \"ebb2b376-feb0-4cac-b6c1-6f4ce47b4356\") " Oct 03 14:06:06 crc kubenswrapper[4861]: I1003 14:06:06.547178 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ebb2b376-feb0-4cac-b6c1-6f4ce47b4356-utilities" (OuterVolumeSpecName: "utilities") pod "ebb2b376-feb0-4cac-b6c1-6f4ce47b4356" (UID: "ebb2b376-feb0-4cac-b6c1-6f4ce47b4356"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 14:06:06 crc kubenswrapper[4861]: I1003 14:06:06.552733 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ebb2b376-feb0-4cac-b6c1-6f4ce47b4356-kube-api-access-s65k7" (OuterVolumeSpecName: "kube-api-access-s65k7") pod "ebb2b376-feb0-4cac-b6c1-6f4ce47b4356" (UID: "ebb2b376-feb0-4cac-b6c1-6f4ce47b4356"). InnerVolumeSpecName "kube-api-access-s65k7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 14:06:06 crc kubenswrapper[4861]: I1003 14:06:06.597115 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ebb2b376-feb0-4cac-b6c1-6f4ce47b4356-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ebb2b376-feb0-4cac-b6c1-6f4ce47b4356" (UID: "ebb2b376-feb0-4cac-b6c1-6f4ce47b4356"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 14:06:06 crc kubenswrapper[4861]: I1003 14:06:06.647513 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s65k7\" (UniqueName: \"kubernetes.io/projected/ebb2b376-feb0-4cac-b6c1-6f4ce47b4356-kube-api-access-s65k7\") on node \"crc\" DevicePath \"\"" Oct 03 14:06:06 crc kubenswrapper[4861]: I1003 14:06:06.647542 4861 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ebb2b376-feb0-4cac-b6c1-6f4ce47b4356-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 14:06:06 crc kubenswrapper[4861]: I1003 14:06:06.647553 4861 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ebb2b376-feb0-4cac-b6c1-6f4ce47b4356-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 14:06:06 crc kubenswrapper[4861]: I1003 14:06:06.968559 4861 generic.go:334] "Generic (PLEG): container finished" podID="ebb2b376-feb0-4cac-b6c1-6f4ce47b4356" containerID="9bca9acab6f5e6196281affe932232b7caeafcdce9c4b9931e4e277d13e08db5" exitCode=0 Oct 03 14:06:06 crc kubenswrapper[4861]: I1003 14:06:06.968611 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-q8n9x" event={"ID":"ebb2b376-feb0-4cac-b6c1-6f4ce47b4356","Type":"ContainerDied","Data":"9bca9acab6f5e6196281affe932232b7caeafcdce9c4b9931e4e277d13e08db5"} Oct 03 14:06:06 crc kubenswrapper[4861]: I1003 14:06:06.968642 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-q8n9x" event={"ID":"ebb2b376-feb0-4cac-b6c1-6f4ce47b4356","Type":"ContainerDied","Data":"a21fcad4393bea069f0c0af8a4307c02167f3f9ec18aaecd1249e297c28c8673"} Oct 03 14:06:06 crc kubenswrapper[4861]: I1003 14:06:06.968664 4861 scope.go:117] "RemoveContainer" containerID="9bca9acab6f5e6196281affe932232b7caeafcdce9c4b9931e4e277d13e08db5" Oct 03 14:06:06 crc kubenswrapper[4861]: I1003 14:06:06.968829 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-q8n9x" Oct 03 14:06:07 crc kubenswrapper[4861]: I1003 14:06:07.000010 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-q8n9x"] Oct 03 14:06:07 crc kubenswrapper[4861]: I1003 14:06:07.005973 4861 scope.go:117] "RemoveContainer" containerID="fa3943318224383bb6b0361f9501143968ae86386f3bc72d41eff41f68d24c2b" Oct 03 14:06:07 crc kubenswrapper[4861]: I1003 14:06:07.010784 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-q8n9x"] Oct 03 14:06:07 crc kubenswrapper[4861]: I1003 14:06:07.040668 4861 scope.go:117] "RemoveContainer" containerID="d666ea40953db836536fa49ea6f0cb3d61e3ef191de08383200321240580d119" Oct 03 14:06:07 crc kubenswrapper[4861]: I1003 14:06:07.069093 4861 scope.go:117] "RemoveContainer" containerID="9bca9acab6f5e6196281affe932232b7caeafcdce9c4b9931e4e277d13e08db5" Oct 03 14:06:07 crc kubenswrapper[4861]: E1003 14:06:07.069558 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9bca9acab6f5e6196281affe932232b7caeafcdce9c4b9931e4e277d13e08db5\": container with ID starting with 9bca9acab6f5e6196281affe932232b7caeafcdce9c4b9931e4e277d13e08db5 not found: ID does not exist" containerID="9bca9acab6f5e6196281affe932232b7caeafcdce9c4b9931e4e277d13e08db5" Oct 03 14:06:07 crc kubenswrapper[4861]: I1003 14:06:07.069595 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9bca9acab6f5e6196281affe932232b7caeafcdce9c4b9931e4e277d13e08db5"} err="failed to get container status \"9bca9acab6f5e6196281affe932232b7caeafcdce9c4b9931e4e277d13e08db5\": rpc error: code = NotFound desc = could not find container \"9bca9acab6f5e6196281affe932232b7caeafcdce9c4b9931e4e277d13e08db5\": container with ID starting with 9bca9acab6f5e6196281affe932232b7caeafcdce9c4b9931e4e277d13e08db5 not found: ID does not exist" Oct 03 14:06:07 crc kubenswrapper[4861]: I1003 14:06:07.069620 4861 scope.go:117] "RemoveContainer" containerID="fa3943318224383bb6b0361f9501143968ae86386f3bc72d41eff41f68d24c2b" Oct 03 14:06:07 crc kubenswrapper[4861]: E1003 14:06:07.070062 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fa3943318224383bb6b0361f9501143968ae86386f3bc72d41eff41f68d24c2b\": container with ID starting with fa3943318224383bb6b0361f9501143968ae86386f3bc72d41eff41f68d24c2b not found: ID does not exist" containerID="fa3943318224383bb6b0361f9501143968ae86386f3bc72d41eff41f68d24c2b" Oct 03 14:06:07 crc kubenswrapper[4861]: I1003 14:06:07.070120 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fa3943318224383bb6b0361f9501143968ae86386f3bc72d41eff41f68d24c2b"} err="failed to get container status \"fa3943318224383bb6b0361f9501143968ae86386f3bc72d41eff41f68d24c2b\": rpc error: code = NotFound desc = could not find container \"fa3943318224383bb6b0361f9501143968ae86386f3bc72d41eff41f68d24c2b\": container with ID starting with fa3943318224383bb6b0361f9501143968ae86386f3bc72d41eff41f68d24c2b not found: ID does not exist" Oct 03 14:06:07 crc kubenswrapper[4861]: I1003 14:06:07.070156 4861 scope.go:117] "RemoveContainer" containerID="d666ea40953db836536fa49ea6f0cb3d61e3ef191de08383200321240580d119" Oct 03 14:06:07 crc kubenswrapper[4861]: E1003 14:06:07.070574 4861 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"d666ea40953db836536fa49ea6f0cb3d61e3ef191de08383200321240580d119\": container with ID starting with d666ea40953db836536fa49ea6f0cb3d61e3ef191de08383200321240580d119 not found: ID does not exist" containerID="d666ea40953db836536fa49ea6f0cb3d61e3ef191de08383200321240580d119" Oct 03 14:06:07 crc kubenswrapper[4861]: I1003 14:06:07.070607 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d666ea40953db836536fa49ea6f0cb3d61e3ef191de08383200321240580d119"} err="failed to get container status \"d666ea40953db836536fa49ea6f0cb3d61e3ef191de08383200321240580d119\": rpc error: code = NotFound desc = could not find container \"d666ea40953db836536fa49ea6f0cb3d61e3ef191de08383200321240580d119\": container with ID starting with d666ea40953db836536fa49ea6f0cb3d61e3ef191de08383200321240580d119 not found: ID does not exist" Oct 03 14:06:07 crc kubenswrapper[4861]: I1003 14:06:07.973151 4861 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-b7r7k" podUID="474c1fa4-e5b7-43cf-86d3-1acdca0cbbd9" containerName="registry-server" probeResult="failure" output=< Oct 03 14:06:07 crc kubenswrapper[4861]: timeout: failed to connect service ":50051" within 1s Oct 03 14:06:07 crc kubenswrapper[4861]: > Oct 03 14:06:08 crc kubenswrapper[4861]: I1003 14:06:08.692284 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ebb2b376-feb0-4cac-b6c1-6f4ce47b4356" path="/var/lib/kubelet/pods/ebb2b376-feb0-4cac-b6c1-6f4ce47b4356/volumes" Oct 03 14:06:16 crc kubenswrapper[4861]: I1003 14:06:16.995556 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-b7r7k" Oct 03 14:06:17 crc kubenswrapper[4861]: I1003 14:06:17.055129 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-b7r7k" Oct 03 14:06:17 crc kubenswrapper[4861]: I1003 14:06:17.812780 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-b7r7k"] Oct 03 14:06:18 crc kubenswrapper[4861]: I1003 14:06:18.098325 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-b7r7k" podUID="474c1fa4-e5b7-43cf-86d3-1acdca0cbbd9" containerName="registry-server" containerID="cri-o://3c950db61f958dbe81a7fa6056f9551f82d7f2346adc4783850b100d75bff776" gracePeriod=2 Oct 03 14:06:18 crc kubenswrapper[4861]: E1003 14:06:18.362024 4861 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod474c1fa4_e5b7_43cf_86d3_1acdca0cbbd9.slice/crio-conmon-3c950db61f958dbe81a7fa6056f9551f82d7f2346adc4783850b100d75bff776.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod474c1fa4_e5b7_43cf_86d3_1acdca0cbbd9.slice/crio-3c950db61f958dbe81a7fa6056f9551f82d7f2346adc4783850b100d75bff776.scope\": RecentStats: unable to find data in memory cache]" Oct 03 14:06:18 crc kubenswrapper[4861]: I1003 14:06:18.591799 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-b7r7k" Oct 03 14:06:18 crc kubenswrapper[4861]: I1003 14:06:18.686577 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jrx88\" (UniqueName: \"kubernetes.io/projected/474c1fa4-e5b7-43cf-86d3-1acdca0cbbd9-kube-api-access-jrx88\") pod \"474c1fa4-e5b7-43cf-86d3-1acdca0cbbd9\" (UID: \"474c1fa4-e5b7-43cf-86d3-1acdca0cbbd9\") " Oct 03 14:06:18 crc kubenswrapper[4861]: I1003 14:06:18.686671 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/474c1fa4-e5b7-43cf-86d3-1acdca0cbbd9-utilities\") pod \"474c1fa4-e5b7-43cf-86d3-1acdca0cbbd9\" (UID: \"474c1fa4-e5b7-43cf-86d3-1acdca0cbbd9\") " Oct 03 14:06:18 crc kubenswrapper[4861]: I1003 14:06:18.686698 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/474c1fa4-e5b7-43cf-86d3-1acdca0cbbd9-catalog-content\") pod \"474c1fa4-e5b7-43cf-86d3-1acdca0cbbd9\" (UID: \"474c1fa4-e5b7-43cf-86d3-1acdca0cbbd9\") " Oct 03 14:06:18 crc kubenswrapper[4861]: I1003 14:06:18.687373 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/474c1fa4-e5b7-43cf-86d3-1acdca0cbbd9-utilities" (OuterVolumeSpecName: "utilities") pod "474c1fa4-e5b7-43cf-86d3-1acdca0cbbd9" (UID: "474c1fa4-e5b7-43cf-86d3-1acdca0cbbd9"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 14:06:18 crc kubenswrapper[4861]: I1003 14:06:18.698367 4861 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/474c1fa4-e5b7-43cf-86d3-1acdca0cbbd9-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 14:06:18 crc kubenswrapper[4861]: I1003 14:06:18.709299 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/474c1fa4-e5b7-43cf-86d3-1acdca0cbbd9-kube-api-access-jrx88" (OuterVolumeSpecName: "kube-api-access-jrx88") pod "474c1fa4-e5b7-43cf-86d3-1acdca0cbbd9" (UID: "474c1fa4-e5b7-43cf-86d3-1acdca0cbbd9"). InnerVolumeSpecName "kube-api-access-jrx88". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 14:06:18 crc kubenswrapper[4861]: I1003 14:06:18.801608 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jrx88\" (UniqueName: \"kubernetes.io/projected/474c1fa4-e5b7-43cf-86d3-1acdca0cbbd9-kube-api-access-jrx88\") on node \"crc\" DevicePath \"\"" Oct 03 14:06:18 crc kubenswrapper[4861]: I1003 14:06:18.802266 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/474c1fa4-e5b7-43cf-86d3-1acdca0cbbd9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "474c1fa4-e5b7-43cf-86d3-1acdca0cbbd9" (UID: "474c1fa4-e5b7-43cf-86d3-1acdca0cbbd9"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 14:06:18 crc kubenswrapper[4861]: I1003 14:06:18.903820 4861 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/474c1fa4-e5b7-43cf-86d3-1acdca0cbbd9-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 14:06:19 crc kubenswrapper[4861]: I1003 14:06:19.111937 4861 generic.go:334] "Generic (PLEG): container finished" podID="474c1fa4-e5b7-43cf-86d3-1acdca0cbbd9" containerID="3c950db61f958dbe81a7fa6056f9551f82d7f2346adc4783850b100d75bff776" exitCode=0 Oct 03 14:06:19 crc kubenswrapper[4861]: I1003 14:06:19.111983 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b7r7k" event={"ID":"474c1fa4-e5b7-43cf-86d3-1acdca0cbbd9","Type":"ContainerDied","Data":"3c950db61f958dbe81a7fa6056f9551f82d7f2346adc4783850b100d75bff776"} Oct 03 14:06:19 crc kubenswrapper[4861]: I1003 14:06:19.112013 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b7r7k" event={"ID":"474c1fa4-e5b7-43cf-86d3-1acdca0cbbd9","Type":"ContainerDied","Data":"294571779d541da525fb636c92c79937f90ed2a8c5617ffff4f656fac19239be"} Oct 03 14:06:19 crc kubenswrapper[4861]: I1003 14:06:19.112033 4861 scope.go:117] "RemoveContainer" containerID="3c950db61f958dbe81a7fa6056f9551f82d7f2346adc4783850b100d75bff776" Oct 03 14:06:19 crc kubenswrapper[4861]: I1003 14:06:19.112190 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-b7r7k" Oct 03 14:06:19 crc kubenswrapper[4861]: I1003 14:06:19.139269 4861 scope.go:117] "RemoveContainer" containerID="df569cb94f0a466612dfff6f680a442332cdc7b10eebc78b538ec2bcaa4da7e1" Oct 03 14:06:19 crc kubenswrapper[4861]: I1003 14:06:19.157724 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-b7r7k"] Oct 03 14:06:19 crc kubenswrapper[4861]: I1003 14:06:19.167917 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-b7r7k"] Oct 03 14:06:19 crc kubenswrapper[4861]: I1003 14:06:19.184981 4861 scope.go:117] "RemoveContainer" containerID="eab9609f8e23f755556422969c4da9617d5d3f3f10c4e61ef3b5af4d90f9a4d0" Oct 03 14:06:19 crc kubenswrapper[4861]: I1003 14:06:19.237658 4861 scope.go:117] "RemoveContainer" containerID="3c950db61f958dbe81a7fa6056f9551f82d7f2346adc4783850b100d75bff776" Oct 03 14:06:19 crc kubenswrapper[4861]: E1003 14:06:19.238058 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3c950db61f958dbe81a7fa6056f9551f82d7f2346adc4783850b100d75bff776\": container with ID starting with 3c950db61f958dbe81a7fa6056f9551f82d7f2346adc4783850b100d75bff776 not found: ID does not exist" containerID="3c950db61f958dbe81a7fa6056f9551f82d7f2346adc4783850b100d75bff776" Oct 03 14:06:19 crc kubenswrapper[4861]: I1003 14:06:19.238094 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3c950db61f958dbe81a7fa6056f9551f82d7f2346adc4783850b100d75bff776"} err="failed to get container status \"3c950db61f958dbe81a7fa6056f9551f82d7f2346adc4783850b100d75bff776\": rpc error: code = NotFound desc = could not find container \"3c950db61f958dbe81a7fa6056f9551f82d7f2346adc4783850b100d75bff776\": container with ID starting with 3c950db61f958dbe81a7fa6056f9551f82d7f2346adc4783850b100d75bff776 not found: ID does not exist" Oct 03 14:06:19 crc 
kubenswrapper[4861]: I1003 14:06:19.238120 4861 scope.go:117] "RemoveContainer" containerID="df569cb94f0a466612dfff6f680a442332cdc7b10eebc78b538ec2bcaa4da7e1" Oct 03 14:06:19 crc kubenswrapper[4861]: E1003 14:06:19.239123 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"df569cb94f0a466612dfff6f680a442332cdc7b10eebc78b538ec2bcaa4da7e1\": container with ID starting with df569cb94f0a466612dfff6f680a442332cdc7b10eebc78b538ec2bcaa4da7e1 not found: ID does not exist" containerID="df569cb94f0a466612dfff6f680a442332cdc7b10eebc78b538ec2bcaa4da7e1" Oct 03 14:06:19 crc kubenswrapper[4861]: I1003 14:06:19.239160 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"df569cb94f0a466612dfff6f680a442332cdc7b10eebc78b538ec2bcaa4da7e1"} err="failed to get container status \"df569cb94f0a466612dfff6f680a442332cdc7b10eebc78b538ec2bcaa4da7e1\": rpc error: code = NotFound desc = could not find container \"df569cb94f0a466612dfff6f680a442332cdc7b10eebc78b538ec2bcaa4da7e1\": container with ID starting with df569cb94f0a466612dfff6f680a442332cdc7b10eebc78b538ec2bcaa4da7e1 not found: ID does not exist" Oct 03 14:06:19 crc kubenswrapper[4861]: I1003 14:06:19.239181 4861 scope.go:117] "RemoveContainer" containerID="eab9609f8e23f755556422969c4da9617d5d3f3f10c4e61ef3b5af4d90f9a4d0" Oct 03 14:06:19 crc kubenswrapper[4861]: E1003 14:06:19.239464 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eab9609f8e23f755556422969c4da9617d5d3f3f10c4e61ef3b5af4d90f9a4d0\": container with ID starting with eab9609f8e23f755556422969c4da9617d5d3f3f10c4e61ef3b5af4d90f9a4d0 not found: ID does not exist" containerID="eab9609f8e23f755556422969c4da9617d5d3f3f10c4e61ef3b5af4d90f9a4d0" Oct 03 14:06:19 crc kubenswrapper[4861]: I1003 14:06:19.239490 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eab9609f8e23f755556422969c4da9617d5d3f3f10c4e61ef3b5af4d90f9a4d0"} err="failed to get container status \"eab9609f8e23f755556422969c4da9617d5d3f3f10c4e61ef3b5af4d90f9a4d0\": rpc error: code = NotFound desc = could not find container \"eab9609f8e23f755556422969c4da9617d5d3f3f10c4e61ef3b5af4d90f9a4d0\": container with ID starting with eab9609f8e23f755556422969c4da9617d5d3f3f10c4e61ef3b5af4d90f9a4d0 not found: ID does not exist" Oct 03 14:06:20 crc kubenswrapper[4861]: I1003 14:06:20.697190 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="474c1fa4-e5b7-43cf-86d3-1acdca0cbbd9" path="/var/lib/kubelet/pods/474c1fa4-e5b7-43cf-86d3-1acdca0cbbd9/volumes" Oct 03 14:06:40 crc kubenswrapper[4861]: I1003 14:06:40.359320 4861 generic.go:334] "Generic (PLEG): container finished" podID="f353c9e7-1d8f-4084-b475-0c725858f034" containerID="cd4078b3bfa84b79ccf55a3f58e97235d6414aac7991a4d88ce560c2cf804ac5" exitCode=0 Oct 03 14:06:40 crc kubenswrapper[4861]: I1003 14:06:40.359558 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-smz4r" event={"ID":"f353c9e7-1d8f-4084-b475-0c725858f034","Type":"ContainerDied","Data":"cd4078b3bfa84b79ccf55a3f58e97235d6414aac7991a4d88ce560c2cf804ac5"} Oct 03 14:06:41 crc kubenswrapper[4861]: I1003 14:06:41.908118 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-smz4r" Oct 03 14:06:41 crc kubenswrapper[4861]: I1003 14:06:41.983717 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hnpsn\" (UniqueName: \"kubernetes.io/projected/f353c9e7-1d8f-4084-b475-0c725858f034-kube-api-access-hnpsn\") pod \"f353c9e7-1d8f-4084-b475-0c725858f034\" (UID: \"f353c9e7-1d8f-4084-b475-0c725858f034\") " Oct 03 14:06:41 crc kubenswrapper[4861]: I1003 14:06:41.983970 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f353c9e7-1d8f-4084-b475-0c725858f034-inventory\") pod \"f353c9e7-1d8f-4084-b475-0c725858f034\" (UID: \"f353c9e7-1d8f-4084-b475-0c725858f034\") " Oct 03 14:06:41 crc kubenswrapper[4861]: I1003 14:06:41.984136 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f353c9e7-1d8f-4084-b475-0c725858f034-ssh-key\") pod \"f353c9e7-1d8f-4084-b475-0c725858f034\" (UID: \"f353c9e7-1d8f-4084-b475-0c725858f034\") " Oct 03 14:06:41 crc kubenswrapper[4861]: I1003 14:06:41.989209 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f353c9e7-1d8f-4084-b475-0c725858f034-kube-api-access-hnpsn" (OuterVolumeSpecName: "kube-api-access-hnpsn") pod "f353c9e7-1d8f-4084-b475-0c725858f034" (UID: "f353c9e7-1d8f-4084-b475-0c725858f034"). InnerVolumeSpecName "kube-api-access-hnpsn". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 14:06:42 crc kubenswrapper[4861]: I1003 14:06:42.011197 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f353c9e7-1d8f-4084-b475-0c725858f034-inventory" (OuterVolumeSpecName: "inventory") pod "f353c9e7-1d8f-4084-b475-0c725858f034" (UID: "f353c9e7-1d8f-4084-b475-0c725858f034"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 14:06:42 crc kubenswrapper[4861]: I1003 14:06:42.031426 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f353c9e7-1d8f-4084-b475-0c725858f034-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "f353c9e7-1d8f-4084-b475-0c725858f034" (UID: "f353c9e7-1d8f-4084-b475-0c725858f034"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 14:06:42 crc kubenswrapper[4861]: I1003 14:06:42.086189 4861 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f353c9e7-1d8f-4084-b475-0c725858f034-inventory\") on node \"crc\" DevicePath \"\"" Oct 03 14:06:42 crc kubenswrapper[4861]: I1003 14:06:42.086220 4861 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f353c9e7-1d8f-4084-b475-0c725858f034-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 03 14:06:42 crc kubenswrapper[4861]: I1003 14:06:42.086309 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hnpsn\" (UniqueName: \"kubernetes.io/projected/f353c9e7-1d8f-4084-b475-0c725858f034-kube-api-access-hnpsn\") on node \"crc\" DevicePath \"\"" Oct 03 14:06:42 crc kubenswrapper[4861]: I1003 14:06:42.384843 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-smz4r" event={"ID":"f353c9e7-1d8f-4084-b475-0c725858f034","Type":"ContainerDied","Data":"f0de9bb061f052e65f07f866befb0a89ea279fda991cf1a5f2d5d8b6104f0d32"} Oct 03 14:06:42 crc kubenswrapper[4861]: I1003 14:06:42.384900 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-smz4r" Oct 03 14:06:42 crc kubenswrapper[4861]: I1003 14:06:42.384910 4861 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f0de9bb061f052e65f07f866befb0a89ea279fda991cf1a5f2d5d8b6104f0d32" Oct 03 14:06:42 crc kubenswrapper[4861]: I1003 14:06:42.539152 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-7qzsr"] Oct 03 14:06:42 crc kubenswrapper[4861]: E1003 14:06:42.539544 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ebb2b376-feb0-4cac-b6c1-6f4ce47b4356" containerName="extract-content" Oct 03 14:06:42 crc kubenswrapper[4861]: I1003 14:06:42.539566 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="ebb2b376-feb0-4cac-b6c1-6f4ce47b4356" containerName="extract-content" Oct 03 14:06:42 crc kubenswrapper[4861]: E1003 14:06:42.539580 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="474c1fa4-e5b7-43cf-86d3-1acdca0cbbd9" containerName="extract-content" Oct 03 14:06:42 crc kubenswrapper[4861]: I1003 14:06:42.539590 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="474c1fa4-e5b7-43cf-86d3-1acdca0cbbd9" containerName="extract-content" Oct 03 14:06:42 crc kubenswrapper[4861]: E1003 14:06:42.539601 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f353c9e7-1d8f-4084-b475-0c725858f034" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Oct 03 14:06:42 crc kubenswrapper[4861]: I1003 14:06:42.539610 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="f353c9e7-1d8f-4084-b475-0c725858f034" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Oct 03 14:06:42 crc kubenswrapper[4861]: E1003 14:06:42.539637 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ebb2b376-feb0-4cac-b6c1-6f4ce47b4356" containerName="registry-server" Oct 03 14:06:42 crc kubenswrapper[4861]: I1003 14:06:42.539646 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="ebb2b376-feb0-4cac-b6c1-6f4ce47b4356" containerName="registry-server" Oct 03 14:06:42 crc kubenswrapper[4861]: E1003 14:06:42.539653 4861 cpu_manager.go:410] "RemoveStaleState: removing 
container" podUID="ebb2b376-feb0-4cac-b6c1-6f4ce47b4356" containerName="extract-utilities" Oct 03 14:06:42 crc kubenswrapper[4861]: I1003 14:06:42.539660 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="ebb2b376-feb0-4cac-b6c1-6f4ce47b4356" containerName="extract-utilities" Oct 03 14:06:42 crc kubenswrapper[4861]: E1003 14:06:42.539676 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="474c1fa4-e5b7-43cf-86d3-1acdca0cbbd9" containerName="extract-utilities" Oct 03 14:06:42 crc kubenswrapper[4861]: I1003 14:06:42.539683 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="474c1fa4-e5b7-43cf-86d3-1acdca0cbbd9" containerName="extract-utilities" Oct 03 14:06:42 crc kubenswrapper[4861]: E1003 14:06:42.539707 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="474c1fa4-e5b7-43cf-86d3-1acdca0cbbd9" containerName="registry-server" Oct 03 14:06:42 crc kubenswrapper[4861]: I1003 14:06:42.539715 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="474c1fa4-e5b7-43cf-86d3-1acdca0cbbd9" containerName="registry-server" Oct 03 14:06:42 crc kubenswrapper[4861]: I1003 14:06:42.539923 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="474c1fa4-e5b7-43cf-86d3-1acdca0cbbd9" containerName="registry-server" Oct 03 14:06:42 crc kubenswrapper[4861]: I1003 14:06:42.539949 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="ebb2b376-feb0-4cac-b6c1-6f4ce47b4356" containerName="registry-server" Oct 03 14:06:42 crc kubenswrapper[4861]: I1003 14:06:42.539962 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="f353c9e7-1d8f-4084-b475-0c725858f034" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Oct 03 14:06:42 crc kubenswrapper[4861]: I1003 14:06:42.540917 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-7qzsr" Oct 03 14:06:42 crc kubenswrapper[4861]: I1003 14:06:42.544073 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 03 14:06:42 crc kubenswrapper[4861]: I1003 14:06:42.544171 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 03 14:06:42 crc kubenswrapper[4861]: I1003 14:06:42.544401 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-bd7xg" Oct 03 14:06:42 crc kubenswrapper[4861]: I1003 14:06:42.544560 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 03 14:06:42 crc kubenswrapper[4861]: I1003 14:06:42.551621 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-7qzsr"] Oct 03 14:06:42 crc kubenswrapper[4861]: I1003 14:06:42.607347 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/e8a2edd4-057a-4eca-948e-5c3eeb2a0550-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-7qzsr\" (UID: \"e8a2edd4-057a-4eca-948e-5c3eeb2a0550\") " pod="openstack/ssh-known-hosts-edpm-deployment-7qzsr" Oct 03 14:06:42 crc kubenswrapper[4861]: I1003 14:06:42.607511 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ct28r\" (UniqueName: \"kubernetes.io/projected/e8a2edd4-057a-4eca-948e-5c3eeb2a0550-kube-api-access-ct28r\") pod \"ssh-known-hosts-edpm-deployment-7qzsr\" (UID: \"e8a2edd4-057a-4eca-948e-5c3eeb2a0550\") " pod="openstack/ssh-known-hosts-edpm-deployment-7qzsr" Oct 03 14:06:42 crc kubenswrapper[4861]: I1003 14:06:42.607825 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/e8a2edd4-057a-4eca-948e-5c3eeb2a0550-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-7qzsr\" (UID: \"e8a2edd4-057a-4eca-948e-5c3eeb2a0550\") " pod="openstack/ssh-known-hosts-edpm-deployment-7qzsr" Oct 03 14:06:42 crc kubenswrapper[4861]: I1003 14:06:42.710357 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/e8a2edd4-057a-4eca-948e-5c3eeb2a0550-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-7qzsr\" (UID: \"e8a2edd4-057a-4eca-948e-5c3eeb2a0550\") " pod="openstack/ssh-known-hosts-edpm-deployment-7qzsr" Oct 03 14:06:42 crc kubenswrapper[4861]: I1003 14:06:42.710666 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/e8a2edd4-057a-4eca-948e-5c3eeb2a0550-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-7qzsr\" (UID: \"e8a2edd4-057a-4eca-948e-5c3eeb2a0550\") " pod="openstack/ssh-known-hosts-edpm-deployment-7qzsr" Oct 03 14:06:42 crc kubenswrapper[4861]: I1003 14:06:42.710814 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ct28r\" (UniqueName: \"kubernetes.io/projected/e8a2edd4-057a-4eca-948e-5c3eeb2a0550-kube-api-access-ct28r\") pod \"ssh-known-hosts-edpm-deployment-7qzsr\" (UID: \"e8a2edd4-057a-4eca-948e-5c3eeb2a0550\") " pod="openstack/ssh-known-hosts-edpm-deployment-7qzsr" Oct 03 14:06:42 crc 
kubenswrapper[4861]: I1003 14:06:42.724369 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/e8a2edd4-057a-4eca-948e-5c3eeb2a0550-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-7qzsr\" (UID: \"e8a2edd4-057a-4eca-948e-5c3eeb2a0550\") " pod="openstack/ssh-known-hosts-edpm-deployment-7qzsr" Oct 03 14:06:42 crc kubenswrapper[4861]: I1003 14:06:42.726676 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/e8a2edd4-057a-4eca-948e-5c3eeb2a0550-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-7qzsr\" (UID: \"e8a2edd4-057a-4eca-948e-5c3eeb2a0550\") " pod="openstack/ssh-known-hosts-edpm-deployment-7qzsr" Oct 03 14:06:42 crc kubenswrapper[4861]: I1003 14:06:42.735577 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ct28r\" (UniqueName: \"kubernetes.io/projected/e8a2edd4-057a-4eca-948e-5c3eeb2a0550-kube-api-access-ct28r\") pod \"ssh-known-hosts-edpm-deployment-7qzsr\" (UID: \"e8a2edd4-057a-4eca-948e-5c3eeb2a0550\") " pod="openstack/ssh-known-hosts-edpm-deployment-7qzsr" Oct 03 14:06:42 crc kubenswrapper[4861]: I1003 14:06:42.901018 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-7qzsr" Oct 03 14:06:43 crc kubenswrapper[4861]: I1003 14:06:43.473677 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-7qzsr"] Oct 03 14:06:43 crc kubenswrapper[4861]: W1003 14:06:43.490500 4861 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode8a2edd4_057a_4eca_948e_5c3eeb2a0550.slice/crio-2d73073ab3fae796f43c26b6670edafabf6b4667effa1e58e9cb5119b5d57d96 WatchSource:0}: Error finding container 2d73073ab3fae796f43c26b6670edafabf6b4667effa1e58e9cb5119b5d57d96: Status 404 returned error can't find the container with id 2d73073ab3fae796f43c26b6670edafabf6b4667effa1e58e9cb5119b5d57d96 Oct 03 14:06:44 crc kubenswrapper[4861]: I1003 14:06:44.404111 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-7qzsr" event={"ID":"e8a2edd4-057a-4eca-948e-5c3eeb2a0550","Type":"ContainerStarted","Data":"e2c46a7a504b742eae5f63c70f5bd9c022e18288fc2caa2deb2df48d8e9009a3"} Oct 03 14:06:44 crc kubenswrapper[4861]: I1003 14:06:44.404470 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-7qzsr" event={"ID":"e8a2edd4-057a-4eca-948e-5c3eeb2a0550","Type":"ContainerStarted","Data":"2d73073ab3fae796f43c26b6670edafabf6b4667effa1e58e9cb5119b5d57d96"} Oct 03 14:06:44 crc kubenswrapper[4861]: I1003 14:06:44.437904 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ssh-known-hosts-edpm-deployment-7qzsr" podStartSLOduration=2.277991912 podStartE2EDuration="2.437878776s" podCreationTimestamp="2025-10-03 14:06:42 +0000 UTC" firstStartedPulling="2025-10-03 14:06:43.506867973 +0000 UTC m=+2117.504853020" lastFinishedPulling="2025-10-03 14:06:43.666754837 +0000 UTC m=+2117.664739884" observedRunningTime="2025-10-03 14:06:44.42786367 +0000 UTC m=+2118.425848747" watchObservedRunningTime="2025-10-03 14:06:44.437878776 +0000 UTC m=+2118.435863833" Oct 03 14:06:51 crc kubenswrapper[4861]: I1003 14:06:51.469845 4861 generic.go:334] "Generic (PLEG): container finished" 
podID="e8a2edd4-057a-4eca-948e-5c3eeb2a0550" containerID="e2c46a7a504b742eae5f63c70f5bd9c022e18288fc2caa2deb2df48d8e9009a3" exitCode=0 Oct 03 14:06:51 crc kubenswrapper[4861]: I1003 14:06:51.469899 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-7qzsr" event={"ID":"e8a2edd4-057a-4eca-948e-5c3eeb2a0550","Type":"ContainerDied","Data":"e2c46a7a504b742eae5f63c70f5bd9c022e18288fc2caa2deb2df48d8e9009a3"} Oct 03 14:06:52 crc kubenswrapper[4861]: I1003 14:06:52.948809 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-7qzsr" Oct 03 14:06:53 crc kubenswrapper[4861]: I1003 14:06:53.028075 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/e8a2edd4-057a-4eca-948e-5c3eeb2a0550-ssh-key-openstack-edpm-ipam\") pod \"e8a2edd4-057a-4eca-948e-5c3eeb2a0550\" (UID: \"e8a2edd4-057a-4eca-948e-5c3eeb2a0550\") " Oct 03 14:06:53 crc kubenswrapper[4861]: I1003 14:06:53.028205 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/e8a2edd4-057a-4eca-948e-5c3eeb2a0550-inventory-0\") pod \"e8a2edd4-057a-4eca-948e-5c3eeb2a0550\" (UID: \"e8a2edd4-057a-4eca-948e-5c3eeb2a0550\") " Oct 03 14:06:53 crc kubenswrapper[4861]: I1003 14:06:53.028419 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ct28r\" (UniqueName: \"kubernetes.io/projected/e8a2edd4-057a-4eca-948e-5c3eeb2a0550-kube-api-access-ct28r\") pod \"e8a2edd4-057a-4eca-948e-5c3eeb2a0550\" (UID: \"e8a2edd4-057a-4eca-948e-5c3eeb2a0550\") " Oct 03 14:06:53 crc kubenswrapper[4861]: I1003 14:06:53.034442 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e8a2edd4-057a-4eca-948e-5c3eeb2a0550-kube-api-access-ct28r" (OuterVolumeSpecName: "kube-api-access-ct28r") pod "e8a2edd4-057a-4eca-948e-5c3eeb2a0550" (UID: "e8a2edd4-057a-4eca-948e-5c3eeb2a0550"). InnerVolumeSpecName "kube-api-access-ct28r". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 14:06:53 crc kubenswrapper[4861]: I1003 14:06:53.056598 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e8a2edd4-057a-4eca-948e-5c3eeb2a0550-inventory-0" (OuterVolumeSpecName: "inventory-0") pod "e8a2edd4-057a-4eca-948e-5c3eeb2a0550" (UID: "e8a2edd4-057a-4eca-948e-5c3eeb2a0550"). InnerVolumeSpecName "inventory-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 14:06:53 crc kubenswrapper[4861]: I1003 14:06:53.063945 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e8a2edd4-057a-4eca-948e-5c3eeb2a0550-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "e8a2edd4-057a-4eca-948e-5c3eeb2a0550" (UID: "e8a2edd4-057a-4eca-948e-5c3eeb2a0550"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 14:06:53 crc kubenswrapper[4861]: I1003 14:06:53.131223 4861 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/e8a2edd4-057a-4eca-948e-5c3eeb2a0550-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Oct 03 14:06:53 crc kubenswrapper[4861]: I1003 14:06:53.131300 4861 reconciler_common.go:293] "Volume detached for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/e8a2edd4-057a-4eca-948e-5c3eeb2a0550-inventory-0\") on node \"crc\" DevicePath \"\"" Oct 03 14:06:53 crc kubenswrapper[4861]: I1003 14:06:53.131321 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ct28r\" (UniqueName: \"kubernetes.io/projected/e8a2edd4-057a-4eca-948e-5c3eeb2a0550-kube-api-access-ct28r\") on node \"crc\" DevicePath \"\"" Oct 03 14:06:53 crc kubenswrapper[4861]: I1003 14:06:53.496145 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-7qzsr" event={"ID":"e8a2edd4-057a-4eca-948e-5c3eeb2a0550","Type":"ContainerDied","Data":"2d73073ab3fae796f43c26b6670edafabf6b4667effa1e58e9cb5119b5d57d96"} Oct 03 14:06:53 crc kubenswrapper[4861]: I1003 14:06:53.496566 4861 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2d73073ab3fae796f43c26b6670edafabf6b4667effa1e58e9cb5119b5d57d96" Oct 03 14:06:53 crc kubenswrapper[4861]: I1003 14:06:53.496443 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-7qzsr" Oct 03 14:06:53 crc kubenswrapper[4861]: I1003 14:06:53.739873 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-996sx"] Oct 03 14:06:53 crc kubenswrapper[4861]: E1003 14:06:53.740312 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e8a2edd4-057a-4eca-948e-5c3eeb2a0550" containerName="ssh-known-hosts-edpm-deployment" Oct 03 14:06:53 crc kubenswrapper[4861]: I1003 14:06:53.740335 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="e8a2edd4-057a-4eca-948e-5c3eeb2a0550" containerName="ssh-known-hosts-edpm-deployment" Oct 03 14:06:53 crc kubenswrapper[4861]: I1003 14:06:53.740567 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="e8a2edd4-057a-4eca-948e-5c3eeb2a0550" containerName="ssh-known-hosts-edpm-deployment" Oct 03 14:06:53 crc kubenswrapper[4861]: I1003 14:06:53.741329 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-996sx" Oct 03 14:06:53 crc kubenswrapper[4861]: I1003 14:06:53.745015 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 03 14:06:53 crc kubenswrapper[4861]: I1003 14:06:53.745186 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 03 14:06:53 crc kubenswrapper[4861]: I1003 14:06:53.746070 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-bd7xg" Oct 03 14:06:53 crc kubenswrapper[4861]: I1003 14:06:53.748595 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 03 14:06:53 crc kubenswrapper[4861]: I1003 14:06:53.787471 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-996sx"] Oct 03 14:06:53 crc kubenswrapper[4861]: I1003 14:06:53.848865 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6877d996-5390-4bac-8c57-cd3f25a65554-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-996sx\" (UID: \"6877d996-5390-4bac-8c57-cd3f25a65554\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-996sx" Oct 03 14:06:53 crc kubenswrapper[4861]: I1003 14:06:53.849047 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kvwzb\" (UniqueName: \"kubernetes.io/projected/6877d996-5390-4bac-8c57-cd3f25a65554-kube-api-access-kvwzb\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-996sx\" (UID: \"6877d996-5390-4bac-8c57-cd3f25a65554\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-996sx" Oct 03 14:06:53 crc kubenswrapper[4861]: I1003 14:06:53.849295 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6877d996-5390-4bac-8c57-cd3f25a65554-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-996sx\" (UID: \"6877d996-5390-4bac-8c57-cd3f25a65554\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-996sx" Oct 03 14:06:53 crc kubenswrapper[4861]: I1003 14:06:53.951145 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6877d996-5390-4bac-8c57-cd3f25a65554-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-996sx\" (UID: \"6877d996-5390-4bac-8c57-cd3f25a65554\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-996sx" Oct 03 14:06:53 crc kubenswrapper[4861]: I1003 14:06:53.951250 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kvwzb\" (UniqueName: \"kubernetes.io/projected/6877d996-5390-4bac-8c57-cd3f25a65554-kube-api-access-kvwzb\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-996sx\" (UID: \"6877d996-5390-4bac-8c57-cd3f25a65554\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-996sx" Oct 03 14:06:53 crc kubenswrapper[4861]: I1003 14:06:53.951312 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6877d996-5390-4bac-8c57-cd3f25a65554-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-996sx\" (UID: \"6877d996-5390-4bac-8c57-cd3f25a65554\") " 
pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-996sx" Oct 03 14:06:53 crc kubenswrapper[4861]: I1003 14:06:53.960272 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6877d996-5390-4bac-8c57-cd3f25a65554-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-996sx\" (UID: \"6877d996-5390-4bac-8c57-cd3f25a65554\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-996sx" Oct 03 14:06:53 crc kubenswrapper[4861]: I1003 14:06:53.961221 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6877d996-5390-4bac-8c57-cd3f25a65554-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-996sx\" (UID: \"6877d996-5390-4bac-8c57-cd3f25a65554\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-996sx" Oct 03 14:06:53 crc kubenswrapper[4861]: I1003 14:06:53.979330 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kvwzb\" (UniqueName: \"kubernetes.io/projected/6877d996-5390-4bac-8c57-cd3f25a65554-kube-api-access-kvwzb\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-996sx\" (UID: \"6877d996-5390-4bac-8c57-cd3f25a65554\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-996sx" Oct 03 14:06:54 crc kubenswrapper[4861]: I1003 14:06:54.065737 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-996sx" Oct 03 14:06:54 crc kubenswrapper[4861]: I1003 14:06:54.395820 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-996sx"] Oct 03 14:06:54 crc kubenswrapper[4861]: I1003 14:06:54.508510 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-996sx" event={"ID":"6877d996-5390-4bac-8c57-cd3f25a65554","Type":"ContainerStarted","Data":"4007437437e9e703efd5b5008a9c4a644bc2d4cacdaf8c534b22de4af306f4d8"} Oct 03 14:06:55 crc kubenswrapper[4861]: I1003 14:06:55.522792 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-996sx" event={"ID":"6877d996-5390-4bac-8c57-cd3f25a65554","Type":"ContainerStarted","Data":"23712df250228f59f16a2c5ec5122ab9dd4066da1d1fde503dea31d712889722"} Oct 03 14:06:55 crc kubenswrapper[4861]: I1003 14:06:55.565536 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-996sx" podStartSLOduration=2.382459905 podStartE2EDuration="2.565508304s" podCreationTimestamp="2025-10-03 14:06:53 +0000 UTC" firstStartedPulling="2025-10-03 14:06:54.410963048 +0000 UTC m=+2128.408948095" lastFinishedPulling="2025-10-03 14:06:54.594011407 +0000 UTC m=+2128.591996494" observedRunningTime="2025-10-03 14:06:55.558745704 +0000 UTC m=+2129.556730791" watchObservedRunningTime="2025-10-03 14:06:55.565508304 +0000 UTC m=+2129.563493391" Oct 03 14:07:00 crc kubenswrapper[4861]: I1003 14:07:00.145551 4861 patch_prober.go:28] interesting pod/machine-config-daemon-t9slw container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 14:07:00 crc kubenswrapper[4861]: I1003 14:07:00.146130 4861 prober.go:107] "Probe failed" probeType="Liveness" 
pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 14:07:04 crc kubenswrapper[4861]: I1003 14:07:04.615003 4861 generic.go:334] "Generic (PLEG): container finished" podID="6877d996-5390-4bac-8c57-cd3f25a65554" containerID="23712df250228f59f16a2c5ec5122ab9dd4066da1d1fde503dea31d712889722" exitCode=0 Oct 03 14:07:04 crc kubenswrapper[4861]: I1003 14:07:04.615740 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-996sx" event={"ID":"6877d996-5390-4bac-8c57-cd3f25a65554","Type":"ContainerDied","Data":"23712df250228f59f16a2c5ec5122ab9dd4066da1d1fde503dea31d712889722"} Oct 03 14:07:06 crc kubenswrapper[4861]: I1003 14:07:06.036768 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-996sx" Oct 03 14:07:06 crc kubenswrapper[4861]: I1003 14:07:06.093795 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kvwzb\" (UniqueName: \"kubernetes.io/projected/6877d996-5390-4bac-8c57-cd3f25a65554-kube-api-access-kvwzb\") pod \"6877d996-5390-4bac-8c57-cd3f25a65554\" (UID: \"6877d996-5390-4bac-8c57-cd3f25a65554\") " Oct 03 14:07:06 crc kubenswrapper[4861]: I1003 14:07:06.093996 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6877d996-5390-4bac-8c57-cd3f25a65554-inventory\") pod \"6877d996-5390-4bac-8c57-cd3f25a65554\" (UID: \"6877d996-5390-4bac-8c57-cd3f25a65554\") " Oct 03 14:07:06 crc kubenswrapper[4861]: I1003 14:07:06.094058 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6877d996-5390-4bac-8c57-cd3f25a65554-ssh-key\") pod \"6877d996-5390-4bac-8c57-cd3f25a65554\" (UID: \"6877d996-5390-4bac-8c57-cd3f25a65554\") " Oct 03 14:07:06 crc kubenswrapper[4861]: I1003 14:07:06.104804 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6877d996-5390-4bac-8c57-cd3f25a65554-kube-api-access-kvwzb" (OuterVolumeSpecName: "kube-api-access-kvwzb") pod "6877d996-5390-4bac-8c57-cd3f25a65554" (UID: "6877d996-5390-4bac-8c57-cd3f25a65554"). InnerVolumeSpecName "kube-api-access-kvwzb". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 14:07:06 crc kubenswrapper[4861]: I1003 14:07:06.121197 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6877d996-5390-4bac-8c57-cd3f25a65554-inventory" (OuterVolumeSpecName: "inventory") pod "6877d996-5390-4bac-8c57-cd3f25a65554" (UID: "6877d996-5390-4bac-8c57-cd3f25a65554"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 14:07:06 crc kubenswrapper[4861]: I1003 14:07:06.127614 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6877d996-5390-4bac-8c57-cd3f25a65554-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "6877d996-5390-4bac-8c57-cd3f25a65554" (UID: "6877d996-5390-4bac-8c57-cd3f25a65554"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 14:07:06 crc kubenswrapper[4861]: I1003 14:07:06.196019 4861 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6877d996-5390-4bac-8c57-cd3f25a65554-inventory\") on node \"crc\" DevicePath \"\"" Oct 03 14:07:06 crc kubenswrapper[4861]: I1003 14:07:06.196142 4861 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6877d996-5390-4bac-8c57-cd3f25a65554-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 03 14:07:06 crc kubenswrapper[4861]: I1003 14:07:06.196209 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kvwzb\" (UniqueName: \"kubernetes.io/projected/6877d996-5390-4bac-8c57-cd3f25a65554-kube-api-access-kvwzb\") on node \"crc\" DevicePath \"\"" Oct 03 14:07:06 crc kubenswrapper[4861]: I1003 14:07:06.635567 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-996sx" event={"ID":"6877d996-5390-4bac-8c57-cd3f25a65554","Type":"ContainerDied","Data":"4007437437e9e703efd5b5008a9c4a644bc2d4cacdaf8c534b22de4af306f4d8"} Oct 03 14:07:06 crc kubenswrapper[4861]: I1003 14:07:06.635608 4861 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4007437437e9e703efd5b5008a9c4a644bc2d4cacdaf8c534b22de4af306f4d8" Oct 03 14:07:06 crc kubenswrapper[4861]: I1003 14:07:06.635626 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-996sx" Oct 03 14:07:06 crc kubenswrapper[4861]: I1003 14:07:06.720991 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-g587s"] Oct 03 14:07:06 crc kubenswrapper[4861]: E1003 14:07:06.721371 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6877d996-5390-4bac-8c57-cd3f25a65554" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Oct 03 14:07:06 crc kubenswrapper[4861]: I1003 14:07:06.721387 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="6877d996-5390-4bac-8c57-cd3f25a65554" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Oct 03 14:07:06 crc kubenswrapper[4861]: I1003 14:07:06.721600 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="6877d996-5390-4bac-8c57-cd3f25a65554" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Oct 03 14:07:06 crc kubenswrapper[4861]: I1003 14:07:06.722215 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-g587s" Oct 03 14:07:06 crc kubenswrapper[4861]: I1003 14:07:06.724201 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 03 14:07:06 crc kubenswrapper[4861]: I1003 14:07:06.724257 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-bd7xg" Oct 03 14:07:06 crc kubenswrapper[4861]: I1003 14:07:06.724861 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 03 14:07:06 crc kubenswrapper[4861]: I1003 14:07:06.725116 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 03 14:07:06 crc kubenswrapper[4861]: I1003 14:07:06.740090 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-g587s"] Oct 03 14:07:06 crc kubenswrapper[4861]: I1003 14:07:06.807348 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b68af2ce-5dae-47da-801b-a2ad6a6b8db1-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-g587s\" (UID: \"b68af2ce-5dae-47da-801b-a2ad6a6b8db1\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-g587s" Oct 03 14:07:06 crc kubenswrapper[4861]: I1003 14:07:06.807608 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b68af2ce-5dae-47da-801b-a2ad6a6b8db1-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-g587s\" (UID: \"b68af2ce-5dae-47da-801b-a2ad6a6b8db1\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-g587s" Oct 03 14:07:06 crc kubenswrapper[4861]: I1003 14:07:06.807635 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4n4mp\" (UniqueName: \"kubernetes.io/projected/b68af2ce-5dae-47da-801b-a2ad6a6b8db1-kube-api-access-4n4mp\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-g587s\" (UID: \"b68af2ce-5dae-47da-801b-a2ad6a6b8db1\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-g587s" Oct 03 14:07:06 crc kubenswrapper[4861]: I1003 14:07:06.909091 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b68af2ce-5dae-47da-801b-a2ad6a6b8db1-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-g587s\" (UID: \"b68af2ce-5dae-47da-801b-a2ad6a6b8db1\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-g587s" Oct 03 14:07:06 crc kubenswrapper[4861]: I1003 14:07:06.909176 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b68af2ce-5dae-47da-801b-a2ad6a6b8db1-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-g587s\" (UID: \"b68af2ce-5dae-47da-801b-a2ad6a6b8db1\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-g587s" Oct 03 14:07:06 crc kubenswrapper[4861]: I1003 14:07:06.909200 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4n4mp\" (UniqueName: \"kubernetes.io/projected/b68af2ce-5dae-47da-801b-a2ad6a6b8db1-kube-api-access-4n4mp\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-g587s\" (UID: 
\"b68af2ce-5dae-47da-801b-a2ad6a6b8db1\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-g587s" Oct 03 14:07:06 crc kubenswrapper[4861]: I1003 14:07:06.914434 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b68af2ce-5dae-47da-801b-a2ad6a6b8db1-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-g587s\" (UID: \"b68af2ce-5dae-47da-801b-a2ad6a6b8db1\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-g587s" Oct 03 14:07:06 crc kubenswrapper[4861]: I1003 14:07:06.922923 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b68af2ce-5dae-47da-801b-a2ad6a6b8db1-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-g587s\" (UID: \"b68af2ce-5dae-47da-801b-a2ad6a6b8db1\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-g587s" Oct 03 14:07:06 crc kubenswrapper[4861]: I1003 14:07:06.931197 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4n4mp\" (UniqueName: \"kubernetes.io/projected/b68af2ce-5dae-47da-801b-a2ad6a6b8db1-kube-api-access-4n4mp\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-g587s\" (UID: \"b68af2ce-5dae-47da-801b-a2ad6a6b8db1\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-g587s" Oct 03 14:07:07 crc kubenswrapper[4861]: I1003 14:07:07.039696 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-g587s" Oct 03 14:07:07 crc kubenswrapper[4861]: I1003 14:07:07.564937 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-g587s"] Oct 03 14:07:07 crc kubenswrapper[4861]: W1003 14:07:07.570326 4861 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb68af2ce_5dae_47da_801b_a2ad6a6b8db1.slice/crio-8219cfefd30eaa9aae2392ebce582f1445a11cc73732f308db95bc188e5f4345 WatchSource:0}: Error finding container 8219cfefd30eaa9aae2392ebce582f1445a11cc73732f308db95bc188e5f4345: Status 404 returned error can't find the container with id 8219cfefd30eaa9aae2392ebce582f1445a11cc73732f308db95bc188e5f4345 Oct 03 14:07:07 crc kubenswrapper[4861]: I1003 14:07:07.644822 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-g587s" event={"ID":"b68af2ce-5dae-47da-801b-a2ad6a6b8db1","Type":"ContainerStarted","Data":"8219cfefd30eaa9aae2392ebce582f1445a11cc73732f308db95bc188e5f4345"} Oct 03 14:07:08 crc kubenswrapper[4861]: I1003 14:07:08.677339 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-g587s" event={"ID":"b68af2ce-5dae-47da-801b-a2ad6a6b8db1","Type":"ContainerStarted","Data":"46efef8d1b5b42a8809afdc20600ae5934a654a870c65cf26870972b97bf2ffb"} Oct 03 14:07:08 crc kubenswrapper[4861]: I1003 14:07:08.709956 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-g587s" podStartSLOduration=2.546100221 podStartE2EDuration="2.709918372s" podCreationTimestamp="2025-10-03 14:07:06 +0000 UTC" firstStartedPulling="2025-10-03 14:07:07.573630141 +0000 UTC m=+2141.571615198" lastFinishedPulling="2025-10-03 14:07:07.737448292 +0000 UTC m=+2141.735433349" observedRunningTime="2025-10-03 14:07:08.701429086 +0000 UTC m=+2142.699414143" 
watchObservedRunningTime="2025-10-03 14:07:08.709918372 +0000 UTC m=+2142.707903469" Oct 03 14:07:18 crc kubenswrapper[4861]: I1003 14:07:18.785934 4861 generic.go:334] "Generic (PLEG): container finished" podID="b68af2ce-5dae-47da-801b-a2ad6a6b8db1" containerID="46efef8d1b5b42a8809afdc20600ae5934a654a870c65cf26870972b97bf2ffb" exitCode=0 Oct 03 14:07:18 crc kubenswrapper[4861]: I1003 14:07:18.786056 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-g587s" event={"ID":"b68af2ce-5dae-47da-801b-a2ad6a6b8db1","Type":"ContainerDied","Data":"46efef8d1b5b42a8809afdc20600ae5934a654a870c65cf26870972b97bf2ffb"} Oct 03 14:07:20 crc kubenswrapper[4861]: I1003 14:07:20.270655 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-g587s" Oct 03 14:07:20 crc kubenswrapper[4861]: I1003 14:07:20.441862 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b68af2ce-5dae-47da-801b-a2ad6a6b8db1-inventory\") pod \"b68af2ce-5dae-47da-801b-a2ad6a6b8db1\" (UID: \"b68af2ce-5dae-47da-801b-a2ad6a6b8db1\") " Oct 03 14:07:20 crc kubenswrapper[4861]: I1003 14:07:20.441893 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b68af2ce-5dae-47da-801b-a2ad6a6b8db1-ssh-key\") pod \"b68af2ce-5dae-47da-801b-a2ad6a6b8db1\" (UID: \"b68af2ce-5dae-47da-801b-a2ad6a6b8db1\") " Oct 03 14:07:20 crc kubenswrapper[4861]: I1003 14:07:20.441940 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4n4mp\" (UniqueName: \"kubernetes.io/projected/b68af2ce-5dae-47da-801b-a2ad6a6b8db1-kube-api-access-4n4mp\") pod \"b68af2ce-5dae-47da-801b-a2ad6a6b8db1\" (UID: \"b68af2ce-5dae-47da-801b-a2ad6a6b8db1\") " Oct 03 14:07:20 crc kubenswrapper[4861]: I1003 14:07:20.455720 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b68af2ce-5dae-47da-801b-a2ad6a6b8db1-kube-api-access-4n4mp" (OuterVolumeSpecName: "kube-api-access-4n4mp") pod "b68af2ce-5dae-47da-801b-a2ad6a6b8db1" (UID: "b68af2ce-5dae-47da-801b-a2ad6a6b8db1"). InnerVolumeSpecName "kube-api-access-4n4mp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 14:07:20 crc kubenswrapper[4861]: I1003 14:07:20.474423 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b68af2ce-5dae-47da-801b-a2ad6a6b8db1-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "b68af2ce-5dae-47da-801b-a2ad6a6b8db1" (UID: "b68af2ce-5dae-47da-801b-a2ad6a6b8db1"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 14:07:20 crc kubenswrapper[4861]: I1003 14:07:20.503523 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b68af2ce-5dae-47da-801b-a2ad6a6b8db1-inventory" (OuterVolumeSpecName: "inventory") pod "b68af2ce-5dae-47da-801b-a2ad6a6b8db1" (UID: "b68af2ce-5dae-47da-801b-a2ad6a6b8db1"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 14:07:20 crc kubenswrapper[4861]: I1003 14:07:20.544068 4861 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b68af2ce-5dae-47da-801b-a2ad6a6b8db1-inventory\") on node \"crc\" DevicePath \"\"" Oct 03 14:07:20 crc kubenswrapper[4861]: I1003 14:07:20.544103 4861 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b68af2ce-5dae-47da-801b-a2ad6a6b8db1-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 03 14:07:20 crc kubenswrapper[4861]: I1003 14:07:20.544112 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4n4mp\" (UniqueName: \"kubernetes.io/projected/b68af2ce-5dae-47da-801b-a2ad6a6b8db1-kube-api-access-4n4mp\") on node \"crc\" DevicePath \"\"" Oct 03 14:07:20 crc kubenswrapper[4861]: I1003 14:07:20.819886 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-g587s" event={"ID":"b68af2ce-5dae-47da-801b-a2ad6a6b8db1","Type":"ContainerDied","Data":"8219cfefd30eaa9aae2392ebce582f1445a11cc73732f308db95bc188e5f4345"} Oct 03 14:07:20 crc kubenswrapper[4861]: I1003 14:07:20.819928 4861 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8219cfefd30eaa9aae2392ebce582f1445a11cc73732f308db95bc188e5f4345" Oct 03 14:07:20 crc kubenswrapper[4861]: I1003 14:07:20.819945 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-g587s" Oct 03 14:07:20 crc kubenswrapper[4861]: I1003 14:07:20.911114 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-69f6f"] Oct 03 14:07:20 crc kubenswrapper[4861]: E1003 14:07:20.911593 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b68af2ce-5dae-47da-801b-a2ad6a6b8db1" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Oct 03 14:07:20 crc kubenswrapper[4861]: I1003 14:07:20.911620 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="b68af2ce-5dae-47da-801b-a2ad6a6b8db1" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Oct 03 14:07:20 crc kubenswrapper[4861]: I1003 14:07:20.911848 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="b68af2ce-5dae-47da-801b-a2ad6a6b8db1" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Oct 03 14:07:20 crc kubenswrapper[4861]: I1003 14:07:20.912594 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-69f6f" Oct 03 14:07:20 crc kubenswrapper[4861]: I1003 14:07:20.922953 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 03 14:07:20 crc kubenswrapper[4861]: I1003 14:07:20.922971 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-libvirt-default-certs-0" Oct 03 14:07:20 crc kubenswrapper[4861]: I1003 14:07:20.922995 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 03 14:07:20 crc kubenswrapper[4861]: I1003 14:07:20.923123 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-ovn-default-certs-0" Oct 03 14:07:20 crc kubenswrapper[4861]: I1003 14:07:20.923197 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-neutron-metadata-default-certs-0" Oct 03 14:07:20 crc kubenswrapper[4861]: I1003 14:07:20.923426 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-telemetry-default-certs-0" Oct 03 14:07:20 crc kubenswrapper[4861]: I1003 14:07:20.923063 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 03 14:07:20 crc kubenswrapper[4861]: I1003 14:07:20.931467 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-bd7xg" Oct 03 14:07:20 crc kubenswrapper[4861]: I1003 14:07:20.931866 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-69f6f"] Oct 03 14:07:21 crc kubenswrapper[4861]: I1003 14:07:21.054116 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e23c8e2c-00d8-43d0-a10a-5f7fef662315-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-69f6f\" (UID: \"e23c8e2c-00d8-43d0-a10a-5f7fef662315\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-69f6f" Oct 03 14:07:21 crc kubenswrapper[4861]: I1003 14:07:21.054665 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e23c8e2c-00d8-43d0-a10a-5f7fef662315-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-69f6f\" (UID: \"e23c8e2c-00d8-43d0-a10a-5f7fef662315\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-69f6f" Oct 03 14:07:21 crc kubenswrapper[4861]: I1003 14:07:21.054797 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e23c8e2c-00d8-43d0-a10a-5f7fef662315-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-69f6f\" (UID: \"e23c8e2c-00d8-43d0-a10a-5f7fef662315\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-69f6f" Oct 03 14:07:21 crc kubenswrapper[4861]: I1003 14:07:21.054893 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e23c8e2c-00d8-43d0-a10a-5f7fef662315-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-69f6f\" (UID: 
\"e23c8e2c-00d8-43d0-a10a-5f7fef662315\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-69f6f" Oct 03 14:07:21 crc kubenswrapper[4861]: I1003 14:07:21.054989 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/e23c8e2c-00d8-43d0-a10a-5f7fef662315-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-69f6f\" (UID: \"e23c8e2c-00d8-43d0-a10a-5f7fef662315\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-69f6f" Oct 03 14:07:21 crc kubenswrapper[4861]: I1003 14:07:21.055088 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/e23c8e2c-00d8-43d0-a10a-5f7fef662315-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-69f6f\" (UID: \"e23c8e2c-00d8-43d0-a10a-5f7fef662315\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-69f6f" Oct 03 14:07:21 crc kubenswrapper[4861]: I1003 14:07:21.055193 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9txbm\" (UniqueName: \"kubernetes.io/projected/e23c8e2c-00d8-43d0-a10a-5f7fef662315-kube-api-access-9txbm\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-69f6f\" (UID: \"e23c8e2c-00d8-43d0-a10a-5f7fef662315\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-69f6f" Oct 03 14:07:21 crc kubenswrapper[4861]: I1003 14:07:21.055299 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e23c8e2c-00d8-43d0-a10a-5f7fef662315-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-69f6f\" (UID: \"e23c8e2c-00d8-43d0-a10a-5f7fef662315\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-69f6f" Oct 03 14:07:21 crc kubenswrapper[4861]: I1003 14:07:21.055389 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/e23c8e2c-00d8-43d0-a10a-5f7fef662315-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-69f6f\" (UID: \"e23c8e2c-00d8-43d0-a10a-5f7fef662315\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-69f6f" Oct 03 14:07:21 crc kubenswrapper[4861]: I1003 14:07:21.055511 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e23c8e2c-00d8-43d0-a10a-5f7fef662315-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-69f6f\" (UID: \"e23c8e2c-00d8-43d0-a10a-5f7fef662315\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-69f6f" Oct 03 14:07:21 crc kubenswrapper[4861]: I1003 14:07:21.055602 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/e23c8e2c-00d8-43d0-a10a-5f7fef662315-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-69f6f\" 
(UID: \"e23c8e2c-00d8-43d0-a10a-5f7fef662315\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-69f6f" Oct 03 14:07:21 crc kubenswrapper[4861]: I1003 14:07:21.055698 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e23c8e2c-00d8-43d0-a10a-5f7fef662315-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-69f6f\" (UID: \"e23c8e2c-00d8-43d0-a10a-5f7fef662315\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-69f6f" Oct 03 14:07:21 crc kubenswrapper[4861]: I1003 14:07:21.055784 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e23c8e2c-00d8-43d0-a10a-5f7fef662315-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-69f6f\" (UID: \"e23c8e2c-00d8-43d0-a10a-5f7fef662315\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-69f6f" Oct 03 14:07:21 crc kubenswrapper[4861]: I1003 14:07:21.055873 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e23c8e2c-00d8-43d0-a10a-5f7fef662315-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-69f6f\" (UID: \"e23c8e2c-00d8-43d0-a10a-5f7fef662315\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-69f6f" Oct 03 14:07:21 crc kubenswrapper[4861]: I1003 14:07:21.157689 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e23c8e2c-00d8-43d0-a10a-5f7fef662315-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-69f6f\" (UID: \"e23c8e2c-00d8-43d0-a10a-5f7fef662315\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-69f6f" Oct 03 14:07:21 crc kubenswrapper[4861]: I1003 14:07:21.157784 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e23c8e2c-00d8-43d0-a10a-5f7fef662315-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-69f6f\" (UID: \"e23c8e2c-00d8-43d0-a10a-5f7fef662315\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-69f6f" Oct 03 14:07:21 crc kubenswrapper[4861]: I1003 14:07:21.157837 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e23c8e2c-00d8-43d0-a10a-5f7fef662315-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-69f6f\" (UID: \"e23c8e2c-00d8-43d0-a10a-5f7fef662315\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-69f6f" Oct 03 14:07:21 crc kubenswrapper[4861]: I1003 14:07:21.157874 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/e23c8e2c-00d8-43d0-a10a-5f7fef662315-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-69f6f\" (UID: \"e23c8e2c-00d8-43d0-a10a-5f7fef662315\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-69f6f" Oct 03 14:07:21 crc kubenswrapper[4861]: I1003 14:07:21.157915 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/e23c8e2c-00d8-43d0-a10a-5f7fef662315-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-69f6f\" (UID: \"e23c8e2c-00d8-43d0-a10a-5f7fef662315\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-69f6f" Oct 03 14:07:21 crc kubenswrapper[4861]: I1003 14:07:21.157941 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9txbm\" (UniqueName: \"kubernetes.io/projected/e23c8e2c-00d8-43d0-a10a-5f7fef662315-kube-api-access-9txbm\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-69f6f\" (UID: \"e23c8e2c-00d8-43d0-a10a-5f7fef662315\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-69f6f" Oct 03 14:07:21 crc kubenswrapper[4861]: I1003 14:07:21.157971 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e23c8e2c-00d8-43d0-a10a-5f7fef662315-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-69f6f\" (UID: \"e23c8e2c-00d8-43d0-a10a-5f7fef662315\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-69f6f" Oct 03 14:07:21 crc kubenswrapper[4861]: I1003 14:07:21.158007 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/e23c8e2c-00d8-43d0-a10a-5f7fef662315-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-69f6f\" (UID: \"e23c8e2c-00d8-43d0-a10a-5f7fef662315\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-69f6f" Oct 03 14:07:21 crc kubenswrapper[4861]: I1003 14:07:21.158063 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e23c8e2c-00d8-43d0-a10a-5f7fef662315-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-69f6f\" (UID: \"e23c8e2c-00d8-43d0-a10a-5f7fef662315\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-69f6f" Oct 03 14:07:21 crc kubenswrapper[4861]: I1003 14:07:21.158098 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/e23c8e2c-00d8-43d0-a10a-5f7fef662315-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-69f6f\" (UID: \"e23c8e2c-00d8-43d0-a10a-5f7fef662315\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-69f6f" Oct 03 14:07:21 crc kubenswrapper[4861]: I1003 14:07:21.158138 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e23c8e2c-00d8-43d0-a10a-5f7fef662315-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-69f6f\" (UID: \"e23c8e2c-00d8-43d0-a10a-5f7fef662315\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-69f6f" Oct 03 14:07:21 crc kubenswrapper[4861]: I1003 14:07:21.158181 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e23c8e2c-00d8-43d0-a10a-5f7fef662315-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-69f6f\" (UID: 
\"e23c8e2c-00d8-43d0-a10a-5f7fef662315\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-69f6f" Oct 03 14:07:21 crc kubenswrapper[4861]: I1003 14:07:21.158203 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e23c8e2c-00d8-43d0-a10a-5f7fef662315-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-69f6f\" (UID: \"e23c8e2c-00d8-43d0-a10a-5f7fef662315\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-69f6f" Oct 03 14:07:21 crc kubenswrapper[4861]: I1003 14:07:21.158264 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e23c8e2c-00d8-43d0-a10a-5f7fef662315-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-69f6f\" (UID: \"e23c8e2c-00d8-43d0-a10a-5f7fef662315\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-69f6f" Oct 03 14:07:21 crc kubenswrapper[4861]: I1003 14:07:21.163335 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e23c8e2c-00d8-43d0-a10a-5f7fef662315-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-69f6f\" (UID: \"e23c8e2c-00d8-43d0-a10a-5f7fef662315\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-69f6f" Oct 03 14:07:21 crc kubenswrapper[4861]: I1003 14:07:21.163383 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e23c8e2c-00d8-43d0-a10a-5f7fef662315-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-69f6f\" (UID: \"e23c8e2c-00d8-43d0-a10a-5f7fef662315\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-69f6f" Oct 03 14:07:21 crc kubenswrapper[4861]: I1003 14:07:21.164626 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/e23c8e2c-00d8-43d0-a10a-5f7fef662315-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-69f6f\" (UID: \"e23c8e2c-00d8-43d0-a10a-5f7fef662315\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-69f6f" Oct 03 14:07:21 crc kubenswrapper[4861]: I1003 14:07:21.165723 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/e23c8e2c-00d8-43d0-a10a-5f7fef662315-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-69f6f\" (UID: \"e23c8e2c-00d8-43d0-a10a-5f7fef662315\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-69f6f" Oct 03 14:07:21 crc kubenswrapper[4861]: I1003 14:07:21.167724 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e23c8e2c-00d8-43d0-a10a-5f7fef662315-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-69f6f\" (UID: \"e23c8e2c-00d8-43d0-a10a-5f7fef662315\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-69f6f" Oct 03 14:07:21 crc kubenswrapper[4861]: I1003 14:07:21.168315 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" 
(UniqueName: \"kubernetes.io/secret/e23c8e2c-00d8-43d0-a10a-5f7fef662315-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-69f6f\" (UID: \"e23c8e2c-00d8-43d0-a10a-5f7fef662315\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-69f6f" Oct 03 14:07:21 crc kubenswrapper[4861]: I1003 14:07:21.169078 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e23c8e2c-00d8-43d0-a10a-5f7fef662315-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-69f6f\" (UID: \"e23c8e2c-00d8-43d0-a10a-5f7fef662315\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-69f6f" Oct 03 14:07:21 crc kubenswrapper[4861]: I1003 14:07:21.169218 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/e23c8e2c-00d8-43d0-a10a-5f7fef662315-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-69f6f\" (UID: \"e23c8e2c-00d8-43d0-a10a-5f7fef662315\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-69f6f" Oct 03 14:07:21 crc kubenswrapper[4861]: I1003 14:07:21.169932 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e23c8e2c-00d8-43d0-a10a-5f7fef662315-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-69f6f\" (UID: \"e23c8e2c-00d8-43d0-a10a-5f7fef662315\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-69f6f" Oct 03 14:07:21 crc kubenswrapper[4861]: I1003 14:07:21.169992 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e23c8e2c-00d8-43d0-a10a-5f7fef662315-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-69f6f\" (UID: \"e23c8e2c-00d8-43d0-a10a-5f7fef662315\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-69f6f" Oct 03 14:07:21 crc kubenswrapper[4861]: I1003 14:07:21.171834 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/e23c8e2c-00d8-43d0-a10a-5f7fef662315-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-69f6f\" (UID: \"e23c8e2c-00d8-43d0-a10a-5f7fef662315\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-69f6f" Oct 03 14:07:21 crc kubenswrapper[4861]: I1003 14:07:21.172341 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e23c8e2c-00d8-43d0-a10a-5f7fef662315-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-69f6f\" (UID: \"e23c8e2c-00d8-43d0-a10a-5f7fef662315\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-69f6f" Oct 03 14:07:21 crc kubenswrapper[4861]: I1003 14:07:21.173881 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e23c8e2c-00d8-43d0-a10a-5f7fef662315-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-69f6f\" (UID: \"e23c8e2c-00d8-43d0-a10a-5f7fef662315\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-69f6f" Oct 03 14:07:21 crc 
kubenswrapper[4861]: I1003 14:07:21.188685 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9txbm\" (UniqueName: \"kubernetes.io/projected/e23c8e2c-00d8-43d0-a10a-5f7fef662315-kube-api-access-9txbm\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-69f6f\" (UID: \"e23c8e2c-00d8-43d0-a10a-5f7fef662315\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-69f6f" Oct 03 14:07:21 crc kubenswrapper[4861]: I1003 14:07:21.231772 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-69f6f" Oct 03 14:07:21 crc kubenswrapper[4861]: I1003 14:07:21.808592 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-69f6f"] Oct 03 14:07:21 crc kubenswrapper[4861]: I1003 14:07:21.830667 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-69f6f" event={"ID":"e23c8e2c-00d8-43d0-a10a-5f7fef662315","Type":"ContainerStarted","Data":"4b010bb4376b2117141244c3770ca59ae62411d47197eea112f9ad9481401e08"} Oct 03 14:07:22 crc kubenswrapper[4861]: I1003 14:07:22.863045 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-69f6f" event={"ID":"e23c8e2c-00d8-43d0-a10a-5f7fef662315","Type":"ContainerStarted","Data":"6f6d9e6c8d5cef2980cbaf701a312de2b10c4c85d7cad74ff4c09c0c48d9c7c3"} Oct 03 14:07:22 crc kubenswrapper[4861]: I1003 14:07:22.893827 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-69f6f" podStartSLOduration=2.709631247 podStartE2EDuration="2.893810519s" podCreationTimestamp="2025-10-03 14:07:20 +0000 UTC" firstStartedPulling="2025-10-03 14:07:21.816765692 +0000 UTC m=+2155.814750739" lastFinishedPulling="2025-10-03 14:07:22.000944934 +0000 UTC m=+2155.998930011" observedRunningTime="2025-10-03 14:07:22.882692493 +0000 UTC m=+2156.880677540" watchObservedRunningTime="2025-10-03 14:07:22.893810519 +0000 UTC m=+2156.891795566" Oct 03 14:07:30 crc kubenswrapper[4861]: I1003 14:07:30.144833 4861 patch_prober.go:28] interesting pod/machine-config-daemon-t9slw container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 14:07:30 crc kubenswrapper[4861]: I1003 14:07:30.145508 4861 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 14:08:00 crc kubenswrapper[4861]: I1003 14:08:00.144717 4861 patch_prober.go:28] interesting pod/machine-config-daemon-t9slw container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 14:08:00 crc kubenswrapper[4861]: I1003 14:08:00.145276 4861 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" containerName="machine-config-daemon" 
probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 14:08:00 crc kubenswrapper[4861]: I1003 14:08:00.145330 4861 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" Oct 03 14:08:00 crc kubenswrapper[4861]: I1003 14:08:00.146079 4861 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"1cdb234382b9308b9a1f3635af42d3c4e786f277d88eba9e4b15d46ab010519c"} pod="openshift-machine-config-operator/machine-config-daemon-t9slw" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 03 14:08:00 crc kubenswrapper[4861]: I1003 14:08:00.146148 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" containerName="machine-config-daemon" containerID="cri-o://1cdb234382b9308b9a1f3635af42d3c4e786f277d88eba9e4b15d46ab010519c" gracePeriod=600 Oct 03 14:08:00 crc kubenswrapper[4861]: E1003 14:08:00.279792 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 14:08:01 crc kubenswrapper[4861]: I1003 14:08:01.229978 4861 generic.go:334] "Generic (PLEG): container finished" podID="d8335d3f-417e-4114-b306-a3d8f6c31348" containerID="1cdb234382b9308b9a1f3635af42d3c4e786f277d88eba9e4b15d46ab010519c" exitCode=0 Oct 03 14:08:01 crc kubenswrapper[4861]: I1003 14:08:01.230030 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" event={"ID":"d8335d3f-417e-4114-b306-a3d8f6c31348","Type":"ContainerDied","Data":"1cdb234382b9308b9a1f3635af42d3c4e786f277d88eba9e4b15d46ab010519c"} Oct 03 14:08:01 crc kubenswrapper[4861]: I1003 14:08:01.230073 4861 scope.go:117] "RemoveContainer" containerID="f064e66ff80057718d5d7200a474f2f2e7de2018d0db330fa44860780399accf" Oct 03 14:08:01 crc kubenswrapper[4861]: I1003 14:08:01.230809 4861 scope.go:117] "RemoveContainer" containerID="1cdb234382b9308b9a1f3635af42d3c4e786f277d88eba9e4b15d46ab010519c" Oct 03 14:08:01 crc kubenswrapper[4861]: E1003 14:08:01.231309 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 14:08:05 crc kubenswrapper[4861]: I1003 14:08:05.268924 4861 generic.go:334] "Generic (PLEG): container finished" podID="e23c8e2c-00d8-43d0-a10a-5f7fef662315" containerID="6f6d9e6c8d5cef2980cbaf701a312de2b10c4c85d7cad74ff4c09c0c48d9c7c3" exitCode=0 Oct 03 14:08:05 crc kubenswrapper[4861]: I1003 14:08:05.269064 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-69f6f" 
event={"ID":"e23c8e2c-00d8-43d0-a10a-5f7fef662315","Type":"ContainerDied","Data":"6f6d9e6c8d5cef2980cbaf701a312de2b10c4c85d7cad74ff4c09c0c48d9c7c3"} Oct 03 14:08:06 crc kubenswrapper[4861]: I1003 14:08:06.740921 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-69f6f" Oct 03 14:08:06 crc kubenswrapper[4861]: I1003 14:08:06.838274 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/e23c8e2c-00d8-43d0-a10a-5f7fef662315-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"e23c8e2c-00d8-43d0-a10a-5f7fef662315\" (UID: \"e23c8e2c-00d8-43d0-a10a-5f7fef662315\") " Oct 03 14:08:06 crc kubenswrapper[4861]: I1003 14:08:06.844507 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e23c8e2c-00d8-43d0-a10a-5f7fef662315-openstack-edpm-ipam-libvirt-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-libvirt-default-certs-0") pod "e23c8e2c-00d8-43d0-a10a-5f7fef662315" (UID: "e23c8e2c-00d8-43d0-a10a-5f7fef662315"). InnerVolumeSpecName "openstack-edpm-ipam-libvirt-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 14:08:06 crc kubenswrapper[4861]: I1003 14:08:06.940508 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9txbm\" (UniqueName: \"kubernetes.io/projected/e23c8e2c-00d8-43d0-a10a-5f7fef662315-kube-api-access-9txbm\") pod \"e23c8e2c-00d8-43d0-a10a-5f7fef662315\" (UID: \"e23c8e2c-00d8-43d0-a10a-5f7fef662315\") " Oct 03 14:08:06 crc kubenswrapper[4861]: I1003 14:08:06.940604 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e23c8e2c-00d8-43d0-a10a-5f7fef662315-inventory\") pod \"e23c8e2c-00d8-43d0-a10a-5f7fef662315\" (UID: \"e23c8e2c-00d8-43d0-a10a-5f7fef662315\") " Oct 03 14:08:06 crc kubenswrapper[4861]: I1003 14:08:06.940653 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/e23c8e2c-00d8-43d0-a10a-5f7fef662315-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"e23c8e2c-00d8-43d0-a10a-5f7fef662315\" (UID: \"e23c8e2c-00d8-43d0-a10a-5f7fef662315\") " Oct 03 14:08:06 crc kubenswrapper[4861]: I1003 14:08:06.940710 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/e23c8e2c-00d8-43d0-a10a-5f7fef662315-openstack-edpm-ipam-ovn-default-certs-0\") pod \"e23c8e2c-00d8-43d0-a10a-5f7fef662315\" (UID: \"e23c8e2c-00d8-43d0-a10a-5f7fef662315\") " Oct 03 14:08:06 crc kubenswrapper[4861]: I1003 14:08:06.940736 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e23c8e2c-00d8-43d0-a10a-5f7fef662315-neutron-metadata-combined-ca-bundle\") pod \"e23c8e2c-00d8-43d0-a10a-5f7fef662315\" (UID: \"e23c8e2c-00d8-43d0-a10a-5f7fef662315\") " Oct 03 14:08:06 crc kubenswrapper[4861]: I1003 14:08:06.940762 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: 
\"kubernetes.io/projected/e23c8e2c-00d8-43d0-a10a-5f7fef662315-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"e23c8e2c-00d8-43d0-a10a-5f7fef662315\" (UID: \"e23c8e2c-00d8-43d0-a10a-5f7fef662315\") " Oct 03 14:08:06 crc kubenswrapper[4861]: I1003 14:08:06.940781 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e23c8e2c-00d8-43d0-a10a-5f7fef662315-telemetry-combined-ca-bundle\") pod \"e23c8e2c-00d8-43d0-a10a-5f7fef662315\" (UID: \"e23c8e2c-00d8-43d0-a10a-5f7fef662315\") " Oct 03 14:08:06 crc kubenswrapper[4861]: I1003 14:08:06.940830 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e23c8e2c-00d8-43d0-a10a-5f7fef662315-bootstrap-combined-ca-bundle\") pod \"e23c8e2c-00d8-43d0-a10a-5f7fef662315\" (UID: \"e23c8e2c-00d8-43d0-a10a-5f7fef662315\") " Oct 03 14:08:06 crc kubenswrapper[4861]: I1003 14:08:06.940846 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e23c8e2c-00d8-43d0-a10a-5f7fef662315-ovn-combined-ca-bundle\") pod \"e23c8e2c-00d8-43d0-a10a-5f7fef662315\" (UID: \"e23c8e2c-00d8-43d0-a10a-5f7fef662315\") " Oct 03 14:08:06 crc kubenswrapper[4861]: I1003 14:08:06.940887 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e23c8e2c-00d8-43d0-a10a-5f7fef662315-ssh-key\") pod \"e23c8e2c-00d8-43d0-a10a-5f7fef662315\" (UID: \"e23c8e2c-00d8-43d0-a10a-5f7fef662315\") " Oct 03 14:08:06 crc kubenswrapper[4861]: I1003 14:08:06.940915 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e23c8e2c-00d8-43d0-a10a-5f7fef662315-libvirt-combined-ca-bundle\") pod \"e23c8e2c-00d8-43d0-a10a-5f7fef662315\" (UID: \"e23c8e2c-00d8-43d0-a10a-5f7fef662315\") " Oct 03 14:08:06 crc kubenswrapper[4861]: I1003 14:08:06.941037 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e23c8e2c-00d8-43d0-a10a-5f7fef662315-repo-setup-combined-ca-bundle\") pod \"e23c8e2c-00d8-43d0-a10a-5f7fef662315\" (UID: \"e23c8e2c-00d8-43d0-a10a-5f7fef662315\") " Oct 03 14:08:06 crc kubenswrapper[4861]: I1003 14:08:06.941077 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e23c8e2c-00d8-43d0-a10a-5f7fef662315-nova-combined-ca-bundle\") pod \"e23c8e2c-00d8-43d0-a10a-5f7fef662315\" (UID: \"e23c8e2c-00d8-43d0-a10a-5f7fef662315\") " Oct 03 14:08:06 crc kubenswrapper[4861]: I1003 14:08:06.944127 4861 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/e23c8e2c-00d8-43d0-a10a-5f7fef662315-openstack-edpm-ipam-libvirt-default-certs-0\") on node \"crc\" DevicePath \"\"" Oct 03 14:08:06 crc kubenswrapper[4861]: I1003 14:08:06.945158 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e23c8e2c-00d8-43d0-a10a-5f7fef662315-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "e23c8e2c-00d8-43d0-a10a-5f7fef662315" (UID: "e23c8e2c-00d8-43d0-a10a-5f7fef662315"). 
InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 14:08:06 crc kubenswrapper[4861]: I1003 14:08:06.945794 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e23c8e2c-00d8-43d0-a10a-5f7fef662315-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "e23c8e2c-00d8-43d0-a10a-5f7fef662315" (UID: "e23c8e2c-00d8-43d0-a10a-5f7fef662315"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 14:08:06 crc kubenswrapper[4861]: I1003 14:08:06.947123 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e23c8e2c-00d8-43d0-a10a-5f7fef662315-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "e23c8e2c-00d8-43d0-a10a-5f7fef662315" (UID: "e23c8e2c-00d8-43d0-a10a-5f7fef662315"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 14:08:06 crc kubenswrapper[4861]: I1003 14:08:06.947959 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e23c8e2c-00d8-43d0-a10a-5f7fef662315-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "e23c8e2c-00d8-43d0-a10a-5f7fef662315" (UID: "e23c8e2c-00d8-43d0-a10a-5f7fef662315"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 14:08:06 crc kubenswrapper[4861]: I1003 14:08:06.948034 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e23c8e2c-00d8-43d0-a10a-5f7fef662315-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "e23c8e2c-00d8-43d0-a10a-5f7fef662315" (UID: "e23c8e2c-00d8-43d0-a10a-5f7fef662315"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 14:08:06 crc kubenswrapper[4861]: I1003 14:08:06.948377 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e23c8e2c-00d8-43d0-a10a-5f7fef662315-kube-api-access-9txbm" (OuterVolumeSpecName: "kube-api-access-9txbm") pod "e23c8e2c-00d8-43d0-a10a-5f7fef662315" (UID: "e23c8e2c-00d8-43d0-a10a-5f7fef662315"). InnerVolumeSpecName "kube-api-access-9txbm". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 14:08:06 crc kubenswrapper[4861]: I1003 14:08:06.948782 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e23c8e2c-00d8-43d0-a10a-5f7fef662315-openstack-edpm-ipam-neutron-metadata-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-neutron-metadata-default-certs-0") pod "e23c8e2c-00d8-43d0-a10a-5f7fef662315" (UID: "e23c8e2c-00d8-43d0-a10a-5f7fef662315"). InnerVolumeSpecName "openstack-edpm-ipam-neutron-metadata-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 14:08:06 crc kubenswrapper[4861]: I1003 14:08:06.950955 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e23c8e2c-00d8-43d0-a10a-5f7fef662315-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "e23c8e2c-00d8-43d0-a10a-5f7fef662315" (UID: "e23c8e2c-00d8-43d0-a10a-5f7fef662315"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 14:08:06 crc kubenswrapper[4861]: I1003 14:08:06.950985 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e23c8e2c-00d8-43d0-a10a-5f7fef662315-openstack-edpm-ipam-telemetry-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-telemetry-default-certs-0") pod "e23c8e2c-00d8-43d0-a10a-5f7fef662315" (UID: "e23c8e2c-00d8-43d0-a10a-5f7fef662315"). InnerVolumeSpecName "openstack-edpm-ipam-telemetry-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 14:08:06 crc kubenswrapper[4861]: I1003 14:08:06.951047 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e23c8e2c-00d8-43d0-a10a-5f7fef662315-openstack-edpm-ipam-ovn-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-ovn-default-certs-0") pod "e23c8e2c-00d8-43d0-a10a-5f7fef662315" (UID: "e23c8e2c-00d8-43d0-a10a-5f7fef662315"). InnerVolumeSpecName "openstack-edpm-ipam-ovn-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 14:08:06 crc kubenswrapper[4861]: I1003 14:08:06.951075 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e23c8e2c-00d8-43d0-a10a-5f7fef662315-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "e23c8e2c-00d8-43d0-a10a-5f7fef662315" (UID: "e23c8e2c-00d8-43d0-a10a-5f7fef662315"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 14:08:06 crc kubenswrapper[4861]: I1003 14:08:06.970551 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e23c8e2c-00d8-43d0-a10a-5f7fef662315-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "e23c8e2c-00d8-43d0-a10a-5f7fef662315" (UID: "e23c8e2c-00d8-43d0-a10a-5f7fef662315"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 14:08:06 crc kubenswrapper[4861]: I1003 14:08:06.977394 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e23c8e2c-00d8-43d0-a10a-5f7fef662315-inventory" (OuterVolumeSpecName: "inventory") pod "e23c8e2c-00d8-43d0-a10a-5f7fef662315" (UID: "e23c8e2c-00d8-43d0-a10a-5f7fef662315"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 14:08:07 crc kubenswrapper[4861]: I1003 14:08:07.046334 4861 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/e23c8e2c-00d8-43d0-a10a-5f7fef662315-openstack-edpm-ipam-telemetry-default-certs-0\") on node \"crc\" DevicePath \"\"" Oct 03 14:08:07 crc kubenswrapper[4861]: I1003 14:08:07.046371 4861 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/e23c8e2c-00d8-43d0-a10a-5f7fef662315-openstack-edpm-ipam-ovn-default-certs-0\") on node \"crc\" DevicePath \"\"" Oct 03 14:08:07 crc kubenswrapper[4861]: I1003 14:08:07.046385 4861 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e23c8e2c-00d8-43d0-a10a-5f7fef662315-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 14:08:07 crc kubenswrapper[4861]: I1003 14:08:07.046399 4861 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/e23c8e2c-00d8-43d0-a10a-5f7fef662315-openstack-edpm-ipam-neutron-metadata-default-certs-0\") on node \"crc\" DevicePath \"\"" Oct 03 14:08:07 crc kubenswrapper[4861]: I1003 14:08:07.046413 4861 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e23c8e2c-00d8-43d0-a10a-5f7fef662315-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 14:08:07 crc kubenswrapper[4861]: I1003 14:08:07.046426 4861 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e23c8e2c-00d8-43d0-a10a-5f7fef662315-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 14:08:07 crc kubenswrapper[4861]: I1003 14:08:07.046438 4861 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e23c8e2c-00d8-43d0-a10a-5f7fef662315-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 14:08:07 crc kubenswrapper[4861]: I1003 14:08:07.046452 4861 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e23c8e2c-00d8-43d0-a10a-5f7fef662315-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 03 14:08:07 crc kubenswrapper[4861]: I1003 14:08:07.046463 4861 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e23c8e2c-00d8-43d0-a10a-5f7fef662315-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 14:08:07 crc kubenswrapper[4861]: I1003 14:08:07.046474 4861 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e23c8e2c-00d8-43d0-a10a-5f7fef662315-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 14:08:07 crc kubenswrapper[4861]: I1003 14:08:07.046484 4861 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e23c8e2c-00d8-43d0-a10a-5f7fef662315-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 14:08:07 crc kubenswrapper[4861]: I1003 14:08:07.046496 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9txbm\" (UniqueName: 
\"kubernetes.io/projected/e23c8e2c-00d8-43d0-a10a-5f7fef662315-kube-api-access-9txbm\") on node \"crc\" DevicePath \"\"" Oct 03 14:08:07 crc kubenswrapper[4861]: I1003 14:08:07.046506 4861 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e23c8e2c-00d8-43d0-a10a-5f7fef662315-inventory\") on node \"crc\" DevicePath \"\"" Oct 03 14:08:07 crc kubenswrapper[4861]: I1003 14:08:07.291461 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-69f6f" Oct 03 14:08:07 crc kubenswrapper[4861]: I1003 14:08:07.291412 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-69f6f" event={"ID":"e23c8e2c-00d8-43d0-a10a-5f7fef662315","Type":"ContainerDied","Data":"4b010bb4376b2117141244c3770ca59ae62411d47197eea112f9ad9481401e08"} Oct 03 14:08:07 crc kubenswrapper[4861]: I1003 14:08:07.291715 4861 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4b010bb4376b2117141244c3770ca59ae62411d47197eea112f9ad9481401e08" Oct 03 14:08:07 crc kubenswrapper[4861]: I1003 14:08:07.453944 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-7c2gq"] Oct 03 14:08:07 crc kubenswrapper[4861]: E1003 14:08:07.454339 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e23c8e2c-00d8-43d0-a10a-5f7fef662315" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Oct 03 14:08:07 crc kubenswrapper[4861]: I1003 14:08:07.454356 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="e23c8e2c-00d8-43d0-a10a-5f7fef662315" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Oct 03 14:08:07 crc kubenswrapper[4861]: I1003 14:08:07.454535 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="e23c8e2c-00d8-43d0-a10a-5f7fef662315" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Oct 03 14:08:07 crc kubenswrapper[4861]: I1003 14:08:07.455113 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-7c2gq" Oct 03 14:08:07 crc kubenswrapper[4861]: I1003 14:08:07.458504 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 03 14:08:07 crc kubenswrapper[4861]: I1003 14:08:07.458870 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 03 14:08:07 crc kubenswrapper[4861]: I1003 14:08:07.459029 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-config" Oct 03 14:08:07 crc kubenswrapper[4861]: I1003 14:08:07.459773 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 03 14:08:07 crc kubenswrapper[4861]: I1003 14:08:07.462436 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-bd7xg" Oct 03 14:08:07 crc kubenswrapper[4861]: I1003 14:08:07.487348 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-7c2gq"] Oct 03 14:08:07 crc kubenswrapper[4861]: I1003 14:08:07.555482 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/defed5cd-53ca-4e60-af05-a4c425abbf60-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-7c2gq\" (UID: \"defed5cd-53ca-4e60-af05-a4c425abbf60\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-7c2gq" Oct 03 14:08:07 crc kubenswrapper[4861]: I1003 14:08:07.555543 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/defed5cd-53ca-4e60-af05-a4c425abbf60-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-7c2gq\" (UID: \"defed5cd-53ca-4e60-af05-a4c425abbf60\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-7c2gq" Oct 03 14:08:07 crc kubenswrapper[4861]: I1003 14:08:07.555582 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-45bm9\" (UniqueName: \"kubernetes.io/projected/defed5cd-53ca-4e60-af05-a4c425abbf60-kube-api-access-45bm9\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-7c2gq\" (UID: \"defed5cd-53ca-4e60-af05-a4c425abbf60\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-7c2gq" Oct 03 14:08:07 crc kubenswrapper[4861]: I1003 14:08:07.555714 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/defed5cd-53ca-4e60-af05-a4c425abbf60-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-7c2gq\" (UID: \"defed5cd-53ca-4e60-af05-a4c425abbf60\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-7c2gq" Oct 03 14:08:07 crc kubenswrapper[4861]: I1003 14:08:07.555762 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/defed5cd-53ca-4e60-af05-a4c425abbf60-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-7c2gq\" (UID: \"defed5cd-53ca-4e60-af05-a4c425abbf60\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-7c2gq" Oct 03 14:08:07 crc kubenswrapper[4861]: I1003 14:08:07.657318 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: 
\"kubernetes.io/secret/defed5cd-53ca-4e60-af05-a4c425abbf60-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-7c2gq\" (UID: \"defed5cd-53ca-4e60-af05-a4c425abbf60\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-7c2gq" Oct 03 14:08:07 crc kubenswrapper[4861]: I1003 14:08:07.657389 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/defed5cd-53ca-4e60-af05-a4c425abbf60-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-7c2gq\" (UID: \"defed5cd-53ca-4e60-af05-a4c425abbf60\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-7c2gq" Oct 03 14:08:07 crc kubenswrapper[4861]: I1003 14:08:07.657431 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-45bm9\" (UniqueName: \"kubernetes.io/projected/defed5cd-53ca-4e60-af05-a4c425abbf60-kube-api-access-45bm9\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-7c2gq\" (UID: \"defed5cd-53ca-4e60-af05-a4c425abbf60\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-7c2gq" Oct 03 14:08:07 crc kubenswrapper[4861]: I1003 14:08:07.657517 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/defed5cd-53ca-4e60-af05-a4c425abbf60-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-7c2gq\" (UID: \"defed5cd-53ca-4e60-af05-a4c425abbf60\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-7c2gq" Oct 03 14:08:07 crc kubenswrapper[4861]: I1003 14:08:07.657566 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/defed5cd-53ca-4e60-af05-a4c425abbf60-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-7c2gq\" (UID: \"defed5cd-53ca-4e60-af05-a4c425abbf60\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-7c2gq" Oct 03 14:08:07 crc kubenswrapper[4861]: I1003 14:08:07.660083 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/defed5cd-53ca-4e60-af05-a4c425abbf60-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-7c2gq\" (UID: \"defed5cd-53ca-4e60-af05-a4c425abbf60\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-7c2gq" Oct 03 14:08:07 crc kubenswrapper[4861]: I1003 14:08:07.661695 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/defed5cd-53ca-4e60-af05-a4c425abbf60-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-7c2gq\" (UID: \"defed5cd-53ca-4e60-af05-a4c425abbf60\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-7c2gq" Oct 03 14:08:07 crc kubenswrapper[4861]: I1003 14:08:07.662654 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/defed5cd-53ca-4e60-af05-a4c425abbf60-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-7c2gq\" (UID: \"defed5cd-53ca-4e60-af05-a4c425abbf60\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-7c2gq" Oct 03 14:08:07 crc kubenswrapper[4861]: I1003 14:08:07.668616 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/defed5cd-53ca-4e60-af05-a4c425abbf60-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-7c2gq\" (UID: \"defed5cd-53ca-4e60-af05-a4c425abbf60\") 
" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-7c2gq" Oct 03 14:08:07 crc kubenswrapper[4861]: I1003 14:08:07.687442 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-45bm9\" (UniqueName: \"kubernetes.io/projected/defed5cd-53ca-4e60-af05-a4c425abbf60-kube-api-access-45bm9\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-7c2gq\" (UID: \"defed5cd-53ca-4e60-af05-a4c425abbf60\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-7c2gq" Oct 03 14:08:07 crc kubenswrapper[4861]: I1003 14:08:07.778690 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-7c2gq" Oct 03 14:08:08 crc kubenswrapper[4861]: I1003 14:08:08.319588 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-7c2gq"] Oct 03 14:08:08 crc kubenswrapper[4861]: W1003 14:08:08.327384 4861 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddefed5cd_53ca_4e60_af05_a4c425abbf60.slice/crio-44e40c064575ab3f9e3b4a4701c49bf1919f30b4d75163818875b2a96924a037 WatchSource:0}: Error finding container 44e40c064575ab3f9e3b4a4701c49bf1919f30b4d75163818875b2a96924a037: Status 404 returned error can't find the container with id 44e40c064575ab3f9e3b4a4701c49bf1919f30b4d75163818875b2a96924a037 Oct 03 14:08:09 crc kubenswrapper[4861]: I1003 14:08:09.312784 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-7c2gq" event={"ID":"defed5cd-53ca-4e60-af05-a4c425abbf60","Type":"ContainerStarted","Data":"92221765df2efa9e9789e58a264e075ceebf37e2a2e0f698255acc079b117e2a"} Oct 03 14:08:09 crc kubenswrapper[4861]: I1003 14:08:09.313063 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-7c2gq" event={"ID":"defed5cd-53ca-4e60-af05-a4c425abbf60","Type":"ContainerStarted","Data":"44e40c064575ab3f9e3b4a4701c49bf1919f30b4d75163818875b2a96924a037"} Oct 03 14:08:09 crc kubenswrapper[4861]: I1003 14:08:09.344163 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-7c2gq" podStartSLOduration=2.117295215 podStartE2EDuration="2.34414622s" podCreationTimestamp="2025-10-03 14:08:07 +0000 UTC" firstStartedPulling="2025-10-03 14:08:08.329535392 +0000 UTC m=+2202.327520439" lastFinishedPulling="2025-10-03 14:08:08.556386397 +0000 UTC m=+2202.554371444" observedRunningTime="2025-10-03 14:08:09.336104627 +0000 UTC m=+2203.334089674" watchObservedRunningTime="2025-10-03 14:08:09.34414622 +0000 UTC m=+2203.342131267" Oct 03 14:08:14 crc kubenswrapper[4861]: I1003 14:08:14.681893 4861 scope.go:117] "RemoveContainer" containerID="1cdb234382b9308b9a1f3635af42d3c4e786f277d88eba9e4b15d46ab010519c" Oct 03 14:08:14 crc kubenswrapper[4861]: E1003 14:08:14.682742 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 14:08:28 crc kubenswrapper[4861]: I1003 14:08:28.681664 4861 scope.go:117] "RemoveContainer" 
containerID="1cdb234382b9308b9a1f3635af42d3c4e786f277d88eba9e4b15d46ab010519c" Oct 03 14:08:28 crc kubenswrapper[4861]: E1003 14:08:28.682455 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 14:08:40 crc kubenswrapper[4861]: I1003 14:08:40.681404 4861 scope.go:117] "RemoveContainer" containerID="1cdb234382b9308b9a1f3635af42d3c4e786f277d88eba9e4b15d46ab010519c" Oct 03 14:08:40 crc kubenswrapper[4861]: E1003 14:08:40.682254 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 14:08:44 crc kubenswrapper[4861]: I1003 14:08:44.129790 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-jk5g7"] Oct 03 14:08:44 crc kubenswrapper[4861]: I1003 14:08:44.133562 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jk5g7" Oct 03 14:08:44 crc kubenswrapper[4861]: I1003 14:08:44.142546 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-jk5g7"] Oct 03 14:08:44 crc kubenswrapper[4861]: I1003 14:08:44.216002 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f0dc90f4-0c04-4614-90f5-fb87f9f5826f-catalog-content\") pod \"redhat-marketplace-jk5g7\" (UID: \"f0dc90f4-0c04-4614-90f5-fb87f9f5826f\") " pod="openshift-marketplace/redhat-marketplace-jk5g7" Oct 03 14:08:44 crc kubenswrapper[4861]: I1003 14:08:44.216433 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f0dc90f4-0c04-4614-90f5-fb87f9f5826f-utilities\") pod \"redhat-marketplace-jk5g7\" (UID: \"f0dc90f4-0c04-4614-90f5-fb87f9f5826f\") " pod="openshift-marketplace/redhat-marketplace-jk5g7" Oct 03 14:08:44 crc kubenswrapper[4861]: I1003 14:08:44.216510 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mq46f\" (UniqueName: \"kubernetes.io/projected/f0dc90f4-0c04-4614-90f5-fb87f9f5826f-kube-api-access-mq46f\") pod \"redhat-marketplace-jk5g7\" (UID: \"f0dc90f4-0c04-4614-90f5-fb87f9f5826f\") " pod="openshift-marketplace/redhat-marketplace-jk5g7" Oct 03 14:08:44 crc kubenswrapper[4861]: I1003 14:08:44.317680 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f0dc90f4-0c04-4614-90f5-fb87f9f5826f-utilities\") pod \"redhat-marketplace-jk5g7\" (UID: \"f0dc90f4-0c04-4614-90f5-fb87f9f5826f\") " pod="openshift-marketplace/redhat-marketplace-jk5g7" Oct 03 14:08:44 crc kubenswrapper[4861]: I1003 14:08:44.317757 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-mq46f\" (UniqueName: \"kubernetes.io/projected/f0dc90f4-0c04-4614-90f5-fb87f9f5826f-kube-api-access-mq46f\") pod \"redhat-marketplace-jk5g7\" (UID: \"f0dc90f4-0c04-4614-90f5-fb87f9f5826f\") " pod="openshift-marketplace/redhat-marketplace-jk5g7" Oct 03 14:08:44 crc kubenswrapper[4861]: I1003 14:08:44.317814 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f0dc90f4-0c04-4614-90f5-fb87f9f5826f-catalog-content\") pod \"redhat-marketplace-jk5g7\" (UID: \"f0dc90f4-0c04-4614-90f5-fb87f9f5826f\") " pod="openshift-marketplace/redhat-marketplace-jk5g7" Oct 03 14:08:44 crc kubenswrapper[4861]: I1003 14:08:44.318370 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f0dc90f4-0c04-4614-90f5-fb87f9f5826f-utilities\") pod \"redhat-marketplace-jk5g7\" (UID: \"f0dc90f4-0c04-4614-90f5-fb87f9f5826f\") " pod="openshift-marketplace/redhat-marketplace-jk5g7" Oct 03 14:08:44 crc kubenswrapper[4861]: I1003 14:08:44.318445 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f0dc90f4-0c04-4614-90f5-fb87f9f5826f-catalog-content\") pod \"redhat-marketplace-jk5g7\" (UID: \"f0dc90f4-0c04-4614-90f5-fb87f9f5826f\") " pod="openshift-marketplace/redhat-marketplace-jk5g7" Oct 03 14:08:44 crc kubenswrapper[4861]: I1003 14:08:44.341518 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mq46f\" (UniqueName: \"kubernetes.io/projected/f0dc90f4-0c04-4614-90f5-fb87f9f5826f-kube-api-access-mq46f\") pod \"redhat-marketplace-jk5g7\" (UID: \"f0dc90f4-0c04-4614-90f5-fb87f9f5826f\") " pod="openshift-marketplace/redhat-marketplace-jk5g7" Oct 03 14:08:44 crc kubenswrapper[4861]: I1003 14:08:44.459072 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jk5g7" Oct 03 14:08:44 crc kubenswrapper[4861]: I1003 14:08:44.985517 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-jk5g7"] Oct 03 14:08:45 crc kubenswrapper[4861]: I1003 14:08:45.668032 4861 generic.go:334] "Generic (PLEG): container finished" podID="f0dc90f4-0c04-4614-90f5-fb87f9f5826f" containerID="7b1d06f0c209da22a57efe0958552494f5a606684c554a2a45de57439c52c8d1" exitCode=0 Oct 03 14:08:45 crc kubenswrapper[4861]: I1003 14:08:45.668140 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jk5g7" event={"ID":"f0dc90f4-0c04-4614-90f5-fb87f9f5826f","Type":"ContainerDied","Data":"7b1d06f0c209da22a57efe0958552494f5a606684c554a2a45de57439c52c8d1"} Oct 03 14:08:45 crc kubenswrapper[4861]: I1003 14:08:45.668403 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jk5g7" event={"ID":"f0dc90f4-0c04-4614-90f5-fb87f9f5826f","Type":"ContainerStarted","Data":"2c483496b3aafed4b5f2bc6b2e143d9e9c9477727a3a4dbb6b34e287160c767e"} Oct 03 14:08:46 crc kubenswrapper[4861]: I1003 14:08:46.717669 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jk5g7" event={"ID":"f0dc90f4-0c04-4614-90f5-fb87f9f5826f","Type":"ContainerStarted","Data":"ad8e9b43f938b7f3a568ec17e7d7376229c33a6bbbd2995bd77ed0b6d1a16a38"} Oct 03 14:08:47 crc kubenswrapper[4861]: I1003 14:08:47.691489 4861 generic.go:334] "Generic (PLEG): container finished" podID="f0dc90f4-0c04-4614-90f5-fb87f9f5826f" containerID="ad8e9b43f938b7f3a568ec17e7d7376229c33a6bbbd2995bd77ed0b6d1a16a38" exitCode=0 Oct 03 14:08:47 crc kubenswrapper[4861]: I1003 14:08:47.691550 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jk5g7" event={"ID":"f0dc90f4-0c04-4614-90f5-fb87f9f5826f","Type":"ContainerDied","Data":"ad8e9b43f938b7f3a568ec17e7d7376229c33a6bbbd2995bd77ed0b6d1a16a38"} Oct 03 14:08:48 crc kubenswrapper[4861]: I1003 14:08:48.701149 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jk5g7" event={"ID":"f0dc90f4-0c04-4614-90f5-fb87f9f5826f","Type":"ContainerStarted","Data":"9a3ef7c7b19bcb07ae247b8213952c87746868ff84971928a06ff6ba81d6836e"} Oct 03 14:08:48 crc kubenswrapper[4861]: I1003 14:08:48.726640 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-jk5g7" podStartSLOduration=2.220335646 podStartE2EDuration="4.726625043s" podCreationTimestamp="2025-10-03 14:08:44 +0000 UTC" firstStartedPulling="2025-10-03 14:08:45.670582334 +0000 UTC m=+2239.668567381" lastFinishedPulling="2025-10-03 14:08:48.176871731 +0000 UTC m=+2242.174856778" observedRunningTime="2025-10-03 14:08:48.721150638 +0000 UTC m=+2242.719135685" watchObservedRunningTime="2025-10-03 14:08:48.726625043 +0000 UTC m=+2242.724610090" Oct 03 14:08:52 crc kubenswrapper[4861]: I1003 14:08:52.681271 4861 scope.go:117] "RemoveContainer" containerID="1cdb234382b9308b9a1f3635af42d3c4e786f277d88eba9e4b15d46ab010519c" Oct 03 14:08:52 crc kubenswrapper[4861]: E1003 14:08:52.683290 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 14:08:54 crc kubenswrapper[4861]: I1003 14:08:54.459908 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-jk5g7" Oct 03 14:08:54 crc kubenswrapper[4861]: I1003 14:08:54.459958 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-jk5g7" Oct 03 14:08:54 crc kubenswrapper[4861]: I1003 14:08:54.506530 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-jk5g7" Oct 03 14:08:54 crc kubenswrapper[4861]: I1003 14:08:54.790869 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-jk5g7" Oct 03 14:08:54 crc kubenswrapper[4861]: I1003 14:08:54.835497 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-jk5g7"] Oct 03 14:08:56 crc kubenswrapper[4861]: I1003 14:08:56.768566 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-jk5g7" podUID="f0dc90f4-0c04-4614-90f5-fb87f9f5826f" containerName="registry-server" containerID="cri-o://9a3ef7c7b19bcb07ae247b8213952c87746868ff84971928a06ff6ba81d6836e" gracePeriod=2 Oct 03 14:08:57 crc kubenswrapper[4861]: I1003 14:08:57.780269 4861 generic.go:334] "Generic (PLEG): container finished" podID="f0dc90f4-0c04-4614-90f5-fb87f9f5826f" containerID="9a3ef7c7b19bcb07ae247b8213952c87746868ff84971928a06ff6ba81d6836e" exitCode=0 Oct 03 14:08:57 crc kubenswrapper[4861]: I1003 14:08:57.780583 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jk5g7" event={"ID":"f0dc90f4-0c04-4614-90f5-fb87f9f5826f","Type":"ContainerDied","Data":"9a3ef7c7b19bcb07ae247b8213952c87746868ff84971928a06ff6ba81d6836e"} Oct 03 14:08:57 crc kubenswrapper[4861]: I1003 14:08:57.932505 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jk5g7" Oct 03 14:08:57 crc kubenswrapper[4861]: I1003 14:08:57.983750 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f0dc90f4-0c04-4614-90f5-fb87f9f5826f-utilities\") pod \"f0dc90f4-0c04-4614-90f5-fb87f9f5826f\" (UID: \"f0dc90f4-0c04-4614-90f5-fb87f9f5826f\") " Oct 03 14:08:57 crc kubenswrapper[4861]: I1003 14:08:57.983870 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f0dc90f4-0c04-4614-90f5-fb87f9f5826f-catalog-content\") pod \"f0dc90f4-0c04-4614-90f5-fb87f9f5826f\" (UID: \"f0dc90f4-0c04-4614-90f5-fb87f9f5826f\") " Oct 03 14:08:57 crc kubenswrapper[4861]: I1003 14:08:57.984009 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mq46f\" (UniqueName: \"kubernetes.io/projected/f0dc90f4-0c04-4614-90f5-fb87f9f5826f-kube-api-access-mq46f\") pod \"f0dc90f4-0c04-4614-90f5-fb87f9f5826f\" (UID: \"f0dc90f4-0c04-4614-90f5-fb87f9f5826f\") " Oct 03 14:08:57 crc kubenswrapper[4861]: I1003 14:08:57.984666 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f0dc90f4-0c04-4614-90f5-fb87f9f5826f-utilities" (OuterVolumeSpecName: "utilities") pod "f0dc90f4-0c04-4614-90f5-fb87f9f5826f" (UID: "f0dc90f4-0c04-4614-90f5-fb87f9f5826f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 14:08:57 crc kubenswrapper[4861]: I1003 14:08:57.996772 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f0dc90f4-0c04-4614-90f5-fb87f9f5826f-kube-api-access-mq46f" (OuterVolumeSpecName: "kube-api-access-mq46f") pod "f0dc90f4-0c04-4614-90f5-fb87f9f5826f" (UID: "f0dc90f4-0c04-4614-90f5-fb87f9f5826f"). InnerVolumeSpecName "kube-api-access-mq46f". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 14:08:57 crc kubenswrapper[4861]: I1003 14:08:57.997371 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f0dc90f4-0c04-4614-90f5-fb87f9f5826f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f0dc90f4-0c04-4614-90f5-fb87f9f5826f" (UID: "f0dc90f4-0c04-4614-90f5-fb87f9f5826f"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 14:08:58 crc kubenswrapper[4861]: I1003 14:08:58.086587 4861 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f0dc90f4-0c04-4614-90f5-fb87f9f5826f-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 14:08:58 crc kubenswrapper[4861]: I1003 14:08:58.086920 4861 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f0dc90f4-0c04-4614-90f5-fb87f9f5826f-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 14:08:58 crc kubenswrapper[4861]: I1003 14:08:58.086940 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mq46f\" (UniqueName: \"kubernetes.io/projected/f0dc90f4-0c04-4614-90f5-fb87f9f5826f-kube-api-access-mq46f\") on node \"crc\" DevicePath \"\"" Oct 03 14:08:58 crc kubenswrapper[4861]: I1003 14:08:58.793073 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jk5g7" event={"ID":"f0dc90f4-0c04-4614-90f5-fb87f9f5826f","Type":"ContainerDied","Data":"2c483496b3aafed4b5f2bc6b2e143d9e9c9477727a3a4dbb6b34e287160c767e"} Oct 03 14:08:58 crc kubenswrapper[4861]: I1003 14:08:58.793128 4861 scope.go:117] "RemoveContainer" containerID="9a3ef7c7b19bcb07ae247b8213952c87746868ff84971928a06ff6ba81d6836e" Oct 03 14:08:58 crc kubenswrapper[4861]: I1003 14:08:58.793161 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jk5g7" Oct 03 14:08:58 crc kubenswrapper[4861]: I1003 14:08:58.820834 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-jk5g7"] Oct 03 14:08:58 crc kubenswrapper[4861]: I1003 14:08:58.828189 4861 scope.go:117] "RemoveContainer" containerID="ad8e9b43f938b7f3a568ec17e7d7376229c33a6bbbd2995bd77ed0b6d1a16a38" Oct 03 14:08:58 crc kubenswrapper[4861]: I1003 14:08:58.828590 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-jk5g7"] Oct 03 14:08:58 crc kubenswrapper[4861]: I1003 14:08:58.850510 4861 scope.go:117] "RemoveContainer" containerID="7b1d06f0c209da22a57efe0958552494f5a606684c554a2a45de57439c52c8d1" Oct 03 14:09:00 crc kubenswrapper[4861]: I1003 14:09:00.690561 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f0dc90f4-0c04-4614-90f5-fb87f9f5826f" path="/var/lib/kubelet/pods/f0dc90f4-0c04-4614-90f5-fb87f9f5826f/volumes" Oct 03 14:09:05 crc kubenswrapper[4861]: I1003 14:09:05.681950 4861 scope.go:117] "RemoveContainer" containerID="1cdb234382b9308b9a1f3635af42d3c4e786f277d88eba9e4b15d46ab010519c" Oct 03 14:09:05 crc kubenswrapper[4861]: E1003 14:09:05.683025 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 14:09:17 crc kubenswrapper[4861]: I1003 14:09:17.967179 4861 generic.go:334] "Generic (PLEG): container finished" podID="defed5cd-53ca-4e60-af05-a4c425abbf60" containerID="92221765df2efa9e9789e58a264e075ceebf37e2a2e0f698255acc079b117e2a" exitCode=0 Oct 03 14:09:17 crc kubenswrapper[4861]: I1003 14:09:17.967471 4861 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-7c2gq" event={"ID":"defed5cd-53ca-4e60-af05-a4c425abbf60","Type":"ContainerDied","Data":"92221765df2efa9e9789e58a264e075ceebf37e2a2e0f698255acc079b117e2a"} Oct 03 14:09:19 crc kubenswrapper[4861]: I1003 14:09:19.412769 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-7c2gq" Oct 03 14:09:19 crc kubenswrapper[4861]: I1003 14:09:19.518213 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/defed5cd-53ca-4e60-af05-a4c425abbf60-ovncontroller-config-0\") pod \"defed5cd-53ca-4e60-af05-a4c425abbf60\" (UID: \"defed5cd-53ca-4e60-af05-a4c425abbf60\") " Oct 03 14:09:19 crc kubenswrapper[4861]: I1003 14:09:19.518335 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/defed5cd-53ca-4e60-af05-a4c425abbf60-ovn-combined-ca-bundle\") pod \"defed5cd-53ca-4e60-af05-a4c425abbf60\" (UID: \"defed5cd-53ca-4e60-af05-a4c425abbf60\") " Oct 03 14:09:19 crc kubenswrapper[4861]: I1003 14:09:19.519064 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-45bm9\" (UniqueName: \"kubernetes.io/projected/defed5cd-53ca-4e60-af05-a4c425abbf60-kube-api-access-45bm9\") pod \"defed5cd-53ca-4e60-af05-a4c425abbf60\" (UID: \"defed5cd-53ca-4e60-af05-a4c425abbf60\") " Oct 03 14:09:19 crc kubenswrapper[4861]: I1003 14:09:19.519188 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/defed5cd-53ca-4e60-af05-a4c425abbf60-inventory\") pod \"defed5cd-53ca-4e60-af05-a4c425abbf60\" (UID: \"defed5cd-53ca-4e60-af05-a4c425abbf60\") " Oct 03 14:09:19 crc kubenswrapper[4861]: I1003 14:09:19.519326 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/defed5cd-53ca-4e60-af05-a4c425abbf60-ssh-key\") pod \"defed5cd-53ca-4e60-af05-a4c425abbf60\" (UID: \"defed5cd-53ca-4e60-af05-a4c425abbf60\") " Oct 03 14:09:19 crc kubenswrapper[4861]: I1003 14:09:19.529884 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/defed5cd-53ca-4e60-af05-a4c425abbf60-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "defed5cd-53ca-4e60-af05-a4c425abbf60" (UID: "defed5cd-53ca-4e60-af05-a4c425abbf60"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 14:09:19 crc kubenswrapper[4861]: I1003 14:09:19.529948 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/defed5cd-53ca-4e60-af05-a4c425abbf60-kube-api-access-45bm9" (OuterVolumeSpecName: "kube-api-access-45bm9") pod "defed5cd-53ca-4e60-af05-a4c425abbf60" (UID: "defed5cd-53ca-4e60-af05-a4c425abbf60"). InnerVolumeSpecName "kube-api-access-45bm9". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 14:09:19 crc kubenswrapper[4861]: I1003 14:09:19.545862 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/defed5cd-53ca-4e60-af05-a4c425abbf60-ovncontroller-config-0" (OuterVolumeSpecName: "ovncontroller-config-0") pod "defed5cd-53ca-4e60-af05-a4c425abbf60" (UID: "defed5cd-53ca-4e60-af05-a4c425abbf60"). InnerVolumeSpecName "ovncontroller-config-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 14:09:19 crc kubenswrapper[4861]: I1003 14:09:19.550349 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/defed5cd-53ca-4e60-af05-a4c425abbf60-inventory" (OuterVolumeSpecName: "inventory") pod "defed5cd-53ca-4e60-af05-a4c425abbf60" (UID: "defed5cd-53ca-4e60-af05-a4c425abbf60"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 14:09:19 crc kubenswrapper[4861]: I1003 14:09:19.558310 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/defed5cd-53ca-4e60-af05-a4c425abbf60-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "defed5cd-53ca-4e60-af05-a4c425abbf60" (UID: "defed5cd-53ca-4e60-af05-a4c425abbf60"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 14:09:19 crc kubenswrapper[4861]: I1003 14:09:19.621481 4861 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/defed5cd-53ca-4e60-af05-a4c425abbf60-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 03 14:09:19 crc kubenswrapper[4861]: I1003 14:09:19.621537 4861 reconciler_common.go:293] "Volume detached for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/defed5cd-53ca-4e60-af05-a4c425abbf60-ovncontroller-config-0\") on node \"crc\" DevicePath \"\"" Oct 03 14:09:19 crc kubenswrapper[4861]: I1003 14:09:19.621552 4861 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/defed5cd-53ca-4e60-af05-a4c425abbf60-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 14:09:19 crc kubenswrapper[4861]: I1003 14:09:19.621564 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-45bm9\" (UniqueName: \"kubernetes.io/projected/defed5cd-53ca-4e60-af05-a4c425abbf60-kube-api-access-45bm9\") on node \"crc\" DevicePath \"\"" Oct 03 14:09:19 crc kubenswrapper[4861]: I1003 14:09:19.621579 4861 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/defed5cd-53ca-4e60-af05-a4c425abbf60-inventory\") on node \"crc\" DevicePath \"\"" Oct 03 14:09:19 crc kubenswrapper[4861]: I1003 14:09:19.681074 4861 scope.go:117] "RemoveContainer" containerID="1cdb234382b9308b9a1f3635af42d3c4e786f277d88eba9e4b15d46ab010519c" Oct 03 14:09:19 crc kubenswrapper[4861]: E1003 14:09:19.681484 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 14:09:19 crc kubenswrapper[4861]: I1003 14:09:19.990688 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-7c2gq" event={"ID":"defed5cd-53ca-4e60-af05-a4c425abbf60","Type":"ContainerDied","Data":"44e40c064575ab3f9e3b4a4701c49bf1919f30b4d75163818875b2a96924a037"} Oct 03 14:09:19 crc kubenswrapper[4861]: I1003 14:09:19.990977 4861 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="44e40c064575ab3f9e3b4a4701c49bf1919f30b4d75163818875b2a96924a037" Oct 03 14:09:19 crc kubenswrapper[4861]: I1003 14:09:19.990753 4861 util.go:48] "No ready 
Oct 03 14:09:20 crc kubenswrapper[4861]: I1003 14:09:20.070206 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-8zld2"]
Oct 03 14:09:20 crc kubenswrapper[4861]: E1003 14:09:20.070621 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f0dc90f4-0c04-4614-90f5-fb87f9f5826f" containerName="extract-content"
Oct 03 14:09:20 crc kubenswrapper[4861]: I1003 14:09:20.070640 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="f0dc90f4-0c04-4614-90f5-fb87f9f5826f" containerName="extract-content"
Oct 03 14:09:20 crc kubenswrapper[4861]: E1003 14:09:20.070676 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f0dc90f4-0c04-4614-90f5-fb87f9f5826f" containerName="registry-server"
Oct 03 14:09:20 crc kubenswrapper[4861]: I1003 14:09:20.070684 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="f0dc90f4-0c04-4614-90f5-fb87f9f5826f" containerName="registry-server"
Oct 03 14:09:20 crc kubenswrapper[4861]: E1003 14:09:20.070717 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f0dc90f4-0c04-4614-90f5-fb87f9f5826f" containerName="extract-utilities"
Oct 03 14:09:20 crc kubenswrapper[4861]: I1003 14:09:20.070726 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="f0dc90f4-0c04-4614-90f5-fb87f9f5826f" containerName="extract-utilities"
Oct 03 14:09:20 crc kubenswrapper[4861]: E1003 14:09:20.070739 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="defed5cd-53ca-4e60-af05-a4c425abbf60" containerName="ovn-edpm-deployment-openstack-edpm-ipam"
Oct 03 14:09:20 crc kubenswrapper[4861]: I1003 14:09:20.070746 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="defed5cd-53ca-4e60-af05-a4c425abbf60" containerName="ovn-edpm-deployment-openstack-edpm-ipam"
Oct 03 14:09:20 crc kubenswrapper[4861]: I1003 14:09:20.070947 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="f0dc90f4-0c04-4614-90f5-fb87f9f5826f" containerName="registry-server"
Oct 03 14:09:20 crc kubenswrapper[4861]: I1003 14:09:20.070972 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="defed5cd-53ca-4e60-af05-a4c425abbf60" containerName="ovn-edpm-deployment-openstack-edpm-ipam"
Oct 03 14:09:20 crc kubenswrapper[4861]: I1003 14:09:20.071609 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-8zld2"
Oct 03 14:09:20 crc kubenswrapper[4861]: I1003 14:09:20.083577 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-8zld2"]
Oct 03 14:09:20 crc kubenswrapper[4861]: I1003 14:09:20.085838 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Oct 03 14:09:20 crc kubenswrapper[4861]: I1003 14:09:20.085915 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-ovn-metadata-agent-neutron-config"
Oct 03 14:09:20 crc kubenswrapper[4861]: I1003 14:09:20.086197 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-bd7xg"
Oct 03 14:09:20 crc kubenswrapper[4861]: I1003 14:09:20.086276 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Oct 03 14:09:20 crc kubenswrapper[4861]: I1003 14:09:20.086404 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Oct 03 14:09:20 crc kubenswrapper[4861]: I1003 14:09:20.086555 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-neutron-config"
Oct 03 14:09:20 crc kubenswrapper[4861]: I1003 14:09:20.131371 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a870822-9c29-4acb-b63c-2ff86a95a9fc-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-8zld2\" (UID: \"5a870822-9c29-4acb-b63c-2ff86a95a9fc\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-8zld2"
Oct 03 14:09:20 crc kubenswrapper[4861]: I1003 14:09:20.131478 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5a870822-9c29-4acb-b63c-2ff86a95a9fc-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-8zld2\" (UID: \"5a870822-9c29-4acb-b63c-2ff86a95a9fc\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-8zld2"
Oct 03 14:09:20 crc kubenswrapper[4861]: I1003 14:09:20.131564 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8dj9r\" (UniqueName: \"kubernetes.io/projected/5a870822-9c29-4acb-b63c-2ff86a95a9fc-kube-api-access-8dj9r\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-8zld2\" (UID: \"5a870822-9c29-4acb-b63c-2ff86a95a9fc\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-8zld2"
Oct 03 14:09:20 crc kubenswrapper[4861]: I1003 14:09:20.131598 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/5a870822-9c29-4acb-b63c-2ff86a95a9fc-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-8zld2\" (UID: \"5a870822-9c29-4acb-b63c-2ff86a95a9fc\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-8zld2"
Oct 03 14:09:20 crc kubenswrapper[4861]: I1003 14:09:20.131626 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/5a870822-9c29-4acb-b63c-2ff86a95a9fc-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-8zld2\" (UID: \"5a870822-9c29-4acb-b63c-2ff86a95a9fc\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-8zld2"
\"kubernetes.io/secret/5a870822-9c29-4acb-b63c-2ff86a95a9fc-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-8zld2\" (UID: \"5a870822-9c29-4acb-b63c-2ff86a95a9fc\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-8zld2" Oct 03 14:09:20 crc kubenswrapper[4861]: I1003 14:09:20.131736 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5a870822-9c29-4acb-b63c-2ff86a95a9fc-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-8zld2\" (UID: \"5a870822-9c29-4acb-b63c-2ff86a95a9fc\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-8zld2" Oct 03 14:09:20 crc kubenswrapper[4861]: I1003 14:09:20.233759 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a870822-9c29-4acb-b63c-2ff86a95a9fc-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-8zld2\" (UID: \"5a870822-9c29-4acb-b63c-2ff86a95a9fc\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-8zld2" Oct 03 14:09:20 crc kubenswrapper[4861]: I1003 14:09:20.233830 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5a870822-9c29-4acb-b63c-2ff86a95a9fc-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-8zld2\" (UID: \"5a870822-9c29-4acb-b63c-2ff86a95a9fc\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-8zld2" Oct 03 14:09:20 crc kubenswrapper[4861]: I1003 14:09:20.233883 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8dj9r\" (UniqueName: \"kubernetes.io/projected/5a870822-9c29-4acb-b63c-2ff86a95a9fc-kube-api-access-8dj9r\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-8zld2\" (UID: \"5a870822-9c29-4acb-b63c-2ff86a95a9fc\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-8zld2" Oct 03 14:09:20 crc kubenswrapper[4861]: I1003 14:09:20.233909 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/5a870822-9c29-4acb-b63c-2ff86a95a9fc-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-8zld2\" (UID: \"5a870822-9c29-4acb-b63c-2ff86a95a9fc\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-8zld2" Oct 03 14:09:20 crc kubenswrapper[4861]: I1003 14:09:20.233931 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/5a870822-9c29-4acb-b63c-2ff86a95a9fc-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-8zld2\" (UID: \"5a870822-9c29-4acb-b63c-2ff86a95a9fc\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-8zld2" Oct 03 14:09:20 crc kubenswrapper[4861]: I1003 14:09:20.233980 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5a870822-9c29-4acb-b63c-2ff86a95a9fc-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-8zld2\" (UID: \"5a870822-9c29-4acb-b63c-2ff86a95a9fc\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-8zld2" Oct 03 14:09:20 
crc kubenswrapper[4861]: I1003 14:09:20.237096 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/5a870822-9c29-4acb-b63c-2ff86a95a9fc-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-8zld2\" (UID: \"5a870822-9c29-4acb-b63c-2ff86a95a9fc\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-8zld2" Oct 03 14:09:20 crc kubenswrapper[4861]: I1003 14:09:20.237152 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/5a870822-9c29-4acb-b63c-2ff86a95a9fc-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-8zld2\" (UID: \"5a870822-9c29-4acb-b63c-2ff86a95a9fc\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-8zld2" Oct 03 14:09:20 crc kubenswrapper[4861]: I1003 14:09:20.237712 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5a870822-9c29-4acb-b63c-2ff86a95a9fc-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-8zld2\" (UID: \"5a870822-9c29-4acb-b63c-2ff86a95a9fc\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-8zld2" Oct 03 14:09:20 crc kubenswrapper[4861]: I1003 14:09:20.237915 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a870822-9c29-4acb-b63c-2ff86a95a9fc-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-8zld2\" (UID: \"5a870822-9c29-4acb-b63c-2ff86a95a9fc\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-8zld2" Oct 03 14:09:20 crc kubenswrapper[4861]: I1003 14:09:20.238353 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5a870822-9c29-4acb-b63c-2ff86a95a9fc-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-8zld2\" (UID: \"5a870822-9c29-4acb-b63c-2ff86a95a9fc\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-8zld2" Oct 03 14:09:20 crc kubenswrapper[4861]: I1003 14:09:20.252849 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8dj9r\" (UniqueName: \"kubernetes.io/projected/5a870822-9c29-4acb-b63c-2ff86a95a9fc-kube-api-access-8dj9r\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-8zld2\" (UID: \"5a870822-9c29-4acb-b63c-2ff86a95a9fc\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-8zld2" Oct 03 14:09:20 crc kubenswrapper[4861]: I1003 14:09:20.387270 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-8zld2" Oct 03 14:09:20 crc kubenswrapper[4861]: I1003 14:09:20.905805 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-8zld2"] Oct 03 14:09:20 crc kubenswrapper[4861]: I1003 14:09:20.998449 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-8zld2" event={"ID":"5a870822-9c29-4acb-b63c-2ff86a95a9fc","Type":"ContainerStarted","Data":"9a516491d3ca310edea77ca0d4970a43473188b6c035eeab5cdc5beefdb906d3"} Oct 03 14:09:22 crc kubenswrapper[4861]: I1003 14:09:22.009701 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-8zld2" event={"ID":"5a870822-9c29-4acb-b63c-2ff86a95a9fc","Type":"ContainerStarted","Data":"5b1e6f81e68c6ebf94af9dd086c4e0ddafd34a3a85ad00dcf8b6bc65c360ad76"} Oct 03 14:09:22 crc kubenswrapper[4861]: I1003 14:09:22.033079 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-8zld2" podStartSLOduration=1.651509936 podStartE2EDuration="2.033052774s" podCreationTimestamp="2025-10-03 14:09:20 +0000 UTC" firstStartedPulling="2025-10-03 14:09:20.909968359 +0000 UTC m=+2274.907953406" lastFinishedPulling="2025-10-03 14:09:21.291511157 +0000 UTC m=+2275.289496244" observedRunningTime="2025-10-03 14:09:22.029624593 +0000 UTC m=+2276.027609970" watchObservedRunningTime="2025-10-03 14:09:22.033052774 +0000 UTC m=+2276.031037811" Oct 03 14:09:34 crc kubenswrapper[4861]: I1003 14:09:34.695710 4861 scope.go:117] "RemoveContainer" containerID="1cdb234382b9308b9a1f3635af42d3c4e786f277d88eba9e4b15d46ab010519c" Oct 03 14:09:34 crc kubenswrapper[4861]: E1003 14:09:34.696694 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 14:09:47 crc kubenswrapper[4861]: I1003 14:09:47.681823 4861 scope.go:117] "RemoveContainer" containerID="1cdb234382b9308b9a1f3635af42d3c4e786f277d88eba9e4b15d46ab010519c" Oct 03 14:09:47 crc kubenswrapper[4861]: E1003 14:09:47.683486 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 14:09:47 crc kubenswrapper[4861]: I1003 14:09:47.874872 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-jcbr2"] Oct 03 14:09:47 crc kubenswrapper[4861]: I1003 14:09:47.877978 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-jcbr2" Oct 03 14:09:47 crc kubenswrapper[4861]: I1003 14:09:47.894426 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-jcbr2"] Oct 03 14:09:47 crc kubenswrapper[4861]: I1003 14:09:47.988718 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7413195b-db31-4d9f-b262-bb9d86526727-utilities\") pod \"certified-operators-jcbr2\" (UID: \"7413195b-db31-4d9f-b262-bb9d86526727\") " pod="openshift-marketplace/certified-operators-jcbr2" Oct 03 14:09:47 crc kubenswrapper[4861]: I1003 14:09:47.988770 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7413195b-db31-4d9f-b262-bb9d86526727-catalog-content\") pod \"certified-operators-jcbr2\" (UID: \"7413195b-db31-4d9f-b262-bb9d86526727\") " pod="openshift-marketplace/certified-operators-jcbr2" Oct 03 14:09:47 crc kubenswrapper[4861]: I1003 14:09:47.988857 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rgqdj\" (UniqueName: \"kubernetes.io/projected/7413195b-db31-4d9f-b262-bb9d86526727-kube-api-access-rgqdj\") pod \"certified-operators-jcbr2\" (UID: \"7413195b-db31-4d9f-b262-bb9d86526727\") " pod="openshift-marketplace/certified-operators-jcbr2" Oct 03 14:09:48 crc kubenswrapper[4861]: I1003 14:09:48.091698 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7413195b-db31-4d9f-b262-bb9d86526727-utilities\") pod \"certified-operators-jcbr2\" (UID: \"7413195b-db31-4d9f-b262-bb9d86526727\") " pod="openshift-marketplace/certified-operators-jcbr2" Oct 03 14:09:48 crc kubenswrapper[4861]: I1003 14:09:48.091737 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7413195b-db31-4d9f-b262-bb9d86526727-catalog-content\") pod \"certified-operators-jcbr2\" (UID: \"7413195b-db31-4d9f-b262-bb9d86526727\") " pod="openshift-marketplace/certified-operators-jcbr2" Oct 03 14:09:48 crc kubenswrapper[4861]: I1003 14:09:48.091794 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rgqdj\" (UniqueName: \"kubernetes.io/projected/7413195b-db31-4d9f-b262-bb9d86526727-kube-api-access-rgqdj\") pod \"certified-operators-jcbr2\" (UID: \"7413195b-db31-4d9f-b262-bb9d86526727\") " pod="openshift-marketplace/certified-operators-jcbr2" Oct 03 14:09:48 crc kubenswrapper[4861]: I1003 14:09:48.092281 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7413195b-db31-4d9f-b262-bb9d86526727-catalog-content\") pod \"certified-operators-jcbr2\" (UID: \"7413195b-db31-4d9f-b262-bb9d86526727\") " pod="openshift-marketplace/certified-operators-jcbr2" Oct 03 14:09:48 crc kubenswrapper[4861]: I1003 14:09:48.092328 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7413195b-db31-4d9f-b262-bb9d86526727-utilities\") pod \"certified-operators-jcbr2\" (UID: \"7413195b-db31-4d9f-b262-bb9d86526727\") " pod="openshift-marketplace/certified-operators-jcbr2" Oct 03 14:09:48 crc kubenswrapper[4861]: I1003 14:09:48.124087 4861 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-rgqdj\" (UniqueName: \"kubernetes.io/projected/7413195b-db31-4d9f-b262-bb9d86526727-kube-api-access-rgqdj\") pod \"certified-operators-jcbr2\" (UID: \"7413195b-db31-4d9f-b262-bb9d86526727\") " pod="openshift-marketplace/certified-operators-jcbr2" Oct 03 14:09:48 crc kubenswrapper[4861]: I1003 14:09:48.206265 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-jcbr2" Oct 03 14:09:48 crc kubenswrapper[4861]: I1003 14:09:48.773463 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-jcbr2"] Oct 03 14:09:49 crc kubenswrapper[4861]: I1003 14:09:49.361770 4861 generic.go:334] "Generic (PLEG): container finished" podID="7413195b-db31-4d9f-b262-bb9d86526727" containerID="bccad0284f6f319ed14043bfec4223cdcc199431cd472a79bc34417fe510727f" exitCode=0 Oct 03 14:09:49 crc kubenswrapper[4861]: I1003 14:09:49.361843 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jcbr2" event={"ID":"7413195b-db31-4d9f-b262-bb9d86526727","Type":"ContainerDied","Data":"bccad0284f6f319ed14043bfec4223cdcc199431cd472a79bc34417fe510727f"} Oct 03 14:09:49 crc kubenswrapper[4861]: I1003 14:09:49.362044 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jcbr2" event={"ID":"7413195b-db31-4d9f-b262-bb9d86526727","Type":"ContainerStarted","Data":"912adeadc9efc7f4bb0daea47b961077647087a860eee2c6a3550f928c1e34be"} Oct 03 14:09:51 crc kubenswrapper[4861]: I1003 14:09:51.380403 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jcbr2" event={"ID":"7413195b-db31-4d9f-b262-bb9d86526727","Type":"ContainerStarted","Data":"b3fc9bd24fd256ad88a0e40f0f2bc75166d6b4cdeeda683857331a41ef617516"} Oct 03 14:09:52 crc kubenswrapper[4861]: I1003 14:09:52.392694 4861 generic.go:334] "Generic (PLEG): container finished" podID="7413195b-db31-4d9f-b262-bb9d86526727" containerID="b3fc9bd24fd256ad88a0e40f0f2bc75166d6b4cdeeda683857331a41ef617516" exitCode=0 Oct 03 14:09:52 crc kubenswrapper[4861]: I1003 14:09:52.392762 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jcbr2" event={"ID":"7413195b-db31-4d9f-b262-bb9d86526727","Type":"ContainerDied","Data":"b3fc9bd24fd256ad88a0e40f0f2bc75166d6b4cdeeda683857331a41ef617516"} Oct 03 14:09:53 crc kubenswrapper[4861]: I1003 14:09:53.408253 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jcbr2" event={"ID":"7413195b-db31-4d9f-b262-bb9d86526727","Type":"ContainerStarted","Data":"46c96a24b49dd21002c01e3771c221b4cc63e0efbdcb1ecd3838b78dc57fbd8b"} Oct 03 14:09:53 crc kubenswrapper[4861]: I1003 14:09:53.438976 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-jcbr2" podStartSLOduration=2.8884936679999997 podStartE2EDuration="6.438955611s" podCreationTimestamp="2025-10-03 14:09:47 +0000 UTC" firstStartedPulling="2025-10-03 14:09:49.364168294 +0000 UTC m=+2303.362153341" lastFinishedPulling="2025-10-03 14:09:52.914630227 +0000 UTC m=+2306.912615284" observedRunningTime="2025-10-03 14:09:53.429597764 +0000 UTC m=+2307.427582821" watchObservedRunningTime="2025-10-03 14:09:53.438955611 +0000 UTC m=+2307.436940658" Oct 03 14:09:58 crc kubenswrapper[4861]: I1003 14:09:58.206896 4861 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="" pod="openshift-marketplace/certified-operators-jcbr2" Oct 03 14:09:58 crc kubenswrapper[4861]: I1003 14:09:58.207458 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-jcbr2" Oct 03 14:09:58 crc kubenswrapper[4861]: I1003 14:09:58.259201 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-jcbr2" Oct 03 14:09:58 crc kubenswrapper[4861]: I1003 14:09:58.519463 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-jcbr2" Oct 03 14:09:58 crc kubenswrapper[4861]: I1003 14:09:58.570102 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-jcbr2"] Oct 03 14:10:00 crc kubenswrapper[4861]: I1003 14:10:00.475349 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-jcbr2" podUID="7413195b-db31-4d9f-b262-bb9d86526727" containerName="registry-server" containerID="cri-o://46c96a24b49dd21002c01e3771c221b4cc63e0efbdcb1ecd3838b78dc57fbd8b" gracePeriod=2 Oct 03 14:10:01 crc kubenswrapper[4861]: I1003 14:10:01.043622 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-jcbr2" Oct 03 14:10:01 crc kubenswrapper[4861]: I1003 14:10:01.160021 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rgqdj\" (UniqueName: \"kubernetes.io/projected/7413195b-db31-4d9f-b262-bb9d86526727-kube-api-access-rgqdj\") pod \"7413195b-db31-4d9f-b262-bb9d86526727\" (UID: \"7413195b-db31-4d9f-b262-bb9d86526727\") " Oct 03 14:10:01 crc kubenswrapper[4861]: I1003 14:10:01.160093 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7413195b-db31-4d9f-b262-bb9d86526727-catalog-content\") pod \"7413195b-db31-4d9f-b262-bb9d86526727\" (UID: \"7413195b-db31-4d9f-b262-bb9d86526727\") " Oct 03 14:10:01 crc kubenswrapper[4861]: I1003 14:10:01.160270 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7413195b-db31-4d9f-b262-bb9d86526727-utilities\") pod \"7413195b-db31-4d9f-b262-bb9d86526727\" (UID: \"7413195b-db31-4d9f-b262-bb9d86526727\") " Oct 03 14:10:01 crc kubenswrapper[4861]: I1003 14:10:01.161033 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7413195b-db31-4d9f-b262-bb9d86526727-utilities" (OuterVolumeSpecName: "utilities") pod "7413195b-db31-4d9f-b262-bb9d86526727" (UID: "7413195b-db31-4d9f-b262-bb9d86526727"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 14:10:01 crc kubenswrapper[4861]: I1003 14:10:01.169991 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7413195b-db31-4d9f-b262-bb9d86526727-kube-api-access-rgqdj" (OuterVolumeSpecName: "kube-api-access-rgqdj") pod "7413195b-db31-4d9f-b262-bb9d86526727" (UID: "7413195b-db31-4d9f-b262-bb9d86526727"). InnerVolumeSpecName "kube-api-access-rgqdj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 14:10:01 crc kubenswrapper[4861]: I1003 14:10:01.211067 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7413195b-db31-4d9f-b262-bb9d86526727-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7413195b-db31-4d9f-b262-bb9d86526727" (UID: "7413195b-db31-4d9f-b262-bb9d86526727"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 14:10:01 crc kubenswrapper[4861]: I1003 14:10:01.262739 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rgqdj\" (UniqueName: \"kubernetes.io/projected/7413195b-db31-4d9f-b262-bb9d86526727-kube-api-access-rgqdj\") on node \"crc\" DevicePath \"\"" Oct 03 14:10:01 crc kubenswrapper[4861]: I1003 14:10:01.262782 4861 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7413195b-db31-4d9f-b262-bb9d86526727-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 14:10:01 crc kubenswrapper[4861]: I1003 14:10:01.262795 4861 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7413195b-db31-4d9f-b262-bb9d86526727-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 14:10:01 crc kubenswrapper[4861]: I1003 14:10:01.488686 4861 generic.go:334] "Generic (PLEG): container finished" podID="7413195b-db31-4d9f-b262-bb9d86526727" containerID="46c96a24b49dd21002c01e3771c221b4cc63e0efbdcb1ecd3838b78dc57fbd8b" exitCode=0 Oct 03 14:10:01 crc kubenswrapper[4861]: I1003 14:10:01.488770 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-jcbr2" Oct 03 14:10:01 crc kubenswrapper[4861]: I1003 14:10:01.488777 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jcbr2" event={"ID":"7413195b-db31-4d9f-b262-bb9d86526727","Type":"ContainerDied","Data":"46c96a24b49dd21002c01e3771c221b4cc63e0efbdcb1ecd3838b78dc57fbd8b"} Oct 03 14:10:01 crc kubenswrapper[4861]: I1003 14:10:01.490383 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jcbr2" event={"ID":"7413195b-db31-4d9f-b262-bb9d86526727","Type":"ContainerDied","Data":"912adeadc9efc7f4bb0daea47b961077647087a860eee2c6a3550f928c1e34be"} Oct 03 14:10:01 crc kubenswrapper[4861]: I1003 14:10:01.490403 4861 scope.go:117] "RemoveContainer" containerID="46c96a24b49dd21002c01e3771c221b4cc63e0efbdcb1ecd3838b78dc57fbd8b" Oct 03 14:10:01 crc kubenswrapper[4861]: I1003 14:10:01.521172 4861 scope.go:117] "RemoveContainer" containerID="b3fc9bd24fd256ad88a0e40f0f2bc75166d6b4cdeeda683857331a41ef617516" Oct 03 14:10:01 crc kubenswrapper[4861]: I1003 14:10:01.570025 4861 scope.go:117] "RemoveContainer" containerID="bccad0284f6f319ed14043bfec4223cdcc199431cd472a79bc34417fe510727f" Oct 03 14:10:01 crc kubenswrapper[4861]: I1003 14:10:01.617928 4861 scope.go:117] "RemoveContainer" containerID="46c96a24b49dd21002c01e3771c221b4cc63e0efbdcb1ecd3838b78dc57fbd8b" Oct 03 14:10:01 crc kubenswrapper[4861]: E1003 14:10:01.618600 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"46c96a24b49dd21002c01e3771c221b4cc63e0efbdcb1ecd3838b78dc57fbd8b\": container with ID starting with 46c96a24b49dd21002c01e3771c221b4cc63e0efbdcb1ecd3838b78dc57fbd8b not found: ID does not exist" 
containerID="46c96a24b49dd21002c01e3771c221b4cc63e0efbdcb1ecd3838b78dc57fbd8b" Oct 03 14:10:01 crc kubenswrapper[4861]: I1003 14:10:01.618648 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"46c96a24b49dd21002c01e3771c221b4cc63e0efbdcb1ecd3838b78dc57fbd8b"} err="failed to get container status \"46c96a24b49dd21002c01e3771c221b4cc63e0efbdcb1ecd3838b78dc57fbd8b\": rpc error: code = NotFound desc = could not find container \"46c96a24b49dd21002c01e3771c221b4cc63e0efbdcb1ecd3838b78dc57fbd8b\": container with ID starting with 46c96a24b49dd21002c01e3771c221b4cc63e0efbdcb1ecd3838b78dc57fbd8b not found: ID does not exist" Oct 03 14:10:01 crc kubenswrapper[4861]: I1003 14:10:01.618676 4861 scope.go:117] "RemoveContainer" containerID="b3fc9bd24fd256ad88a0e40f0f2bc75166d6b4cdeeda683857331a41ef617516" Oct 03 14:10:01 crc kubenswrapper[4861]: E1003 14:10:01.619020 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b3fc9bd24fd256ad88a0e40f0f2bc75166d6b4cdeeda683857331a41ef617516\": container with ID starting with b3fc9bd24fd256ad88a0e40f0f2bc75166d6b4cdeeda683857331a41ef617516 not found: ID does not exist" containerID="b3fc9bd24fd256ad88a0e40f0f2bc75166d6b4cdeeda683857331a41ef617516" Oct 03 14:10:01 crc kubenswrapper[4861]: I1003 14:10:01.619070 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b3fc9bd24fd256ad88a0e40f0f2bc75166d6b4cdeeda683857331a41ef617516"} err="failed to get container status \"b3fc9bd24fd256ad88a0e40f0f2bc75166d6b4cdeeda683857331a41ef617516\": rpc error: code = NotFound desc = could not find container \"b3fc9bd24fd256ad88a0e40f0f2bc75166d6b4cdeeda683857331a41ef617516\": container with ID starting with b3fc9bd24fd256ad88a0e40f0f2bc75166d6b4cdeeda683857331a41ef617516 not found: ID does not exist" Oct 03 14:10:01 crc kubenswrapper[4861]: I1003 14:10:01.619107 4861 scope.go:117] "RemoveContainer" containerID="bccad0284f6f319ed14043bfec4223cdcc199431cd472a79bc34417fe510727f" Oct 03 14:10:01 crc kubenswrapper[4861]: E1003 14:10:01.619437 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bccad0284f6f319ed14043bfec4223cdcc199431cd472a79bc34417fe510727f\": container with ID starting with bccad0284f6f319ed14043bfec4223cdcc199431cd472a79bc34417fe510727f not found: ID does not exist" containerID="bccad0284f6f319ed14043bfec4223cdcc199431cd472a79bc34417fe510727f" Oct 03 14:10:01 crc kubenswrapper[4861]: I1003 14:10:01.619574 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bccad0284f6f319ed14043bfec4223cdcc199431cd472a79bc34417fe510727f"} err="failed to get container status \"bccad0284f6f319ed14043bfec4223cdcc199431cd472a79bc34417fe510727f\": rpc error: code = NotFound desc = could not find container \"bccad0284f6f319ed14043bfec4223cdcc199431cd472a79bc34417fe510727f\": container with ID starting with bccad0284f6f319ed14043bfec4223cdcc199431cd472a79bc34417fe510727f not found: ID does not exist" Oct 03 14:10:01 crc kubenswrapper[4861]: I1003 14:10:01.683676 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-jcbr2"] Oct 03 14:10:01 crc kubenswrapper[4861]: I1003 14:10:01.693002 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-jcbr2"] Oct 03 14:10:02 crc kubenswrapper[4861]: I1003 14:10:02.680877 
Oct 03 14:10:02 crc kubenswrapper[4861]: E1003 14:10:02.681430 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348"
Oct 03 14:10:02 crc kubenswrapper[4861]: I1003 14:10:02.691745 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7413195b-db31-4d9f-b262-bb9d86526727" path="/var/lib/kubelet/pods/7413195b-db31-4d9f-b262-bb9d86526727/volumes"
Oct 03 14:10:14 crc kubenswrapper[4861]: I1003 14:10:14.611803 4861 generic.go:334] "Generic (PLEG): container finished" podID="5a870822-9c29-4acb-b63c-2ff86a95a9fc" containerID="5b1e6f81e68c6ebf94af9dd086c4e0ddafd34a3a85ad00dcf8b6bc65c360ad76" exitCode=0
Oct 03 14:10:14 crc kubenswrapper[4861]: I1003 14:10:14.611961 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-8zld2" event={"ID":"5a870822-9c29-4acb-b63c-2ff86a95a9fc","Type":"ContainerDied","Data":"5b1e6f81e68c6ebf94af9dd086c4e0ddafd34a3a85ad00dcf8b6bc65c360ad76"}
Oct 03 14:10:14 crc kubenswrapper[4861]: I1003 14:10:14.681831 4861 scope.go:117] "RemoveContainer" containerID="1cdb234382b9308b9a1f3635af42d3c4e786f277d88eba9e4b15d46ab010519c"
Oct 03 14:10:14 crc kubenswrapper[4861]: E1003 14:10:14.682082 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348"
Oct 03 14:10:16 crc kubenswrapper[4861]: I1003 14:10:16.104828 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-8zld2"
Oct 03 14:10:16 crc kubenswrapper[4861]: I1003 14:10:16.193034 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/5a870822-9c29-4acb-b63c-2ff86a95a9fc-neutron-ovn-metadata-agent-neutron-config-0\") pod \"5a870822-9c29-4acb-b63c-2ff86a95a9fc\" (UID: \"5a870822-9c29-4acb-b63c-2ff86a95a9fc\") "
Oct 03 14:10:16 crc kubenswrapper[4861]: I1003 14:10:16.193102 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8dj9r\" (UniqueName: \"kubernetes.io/projected/5a870822-9c29-4acb-b63c-2ff86a95a9fc-kube-api-access-8dj9r\") pod \"5a870822-9c29-4acb-b63c-2ff86a95a9fc\" (UID: \"5a870822-9c29-4acb-b63c-2ff86a95a9fc\") "
Oct 03 14:10:16 crc kubenswrapper[4861]: I1003 14:10:16.193149 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a870822-9c29-4acb-b63c-2ff86a95a9fc-neutron-metadata-combined-ca-bundle\") pod \"5a870822-9c29-4acb-b63c-2ff86a95a9fc\" (UID: \"5a870822-9c29-4acb-b63c-2ff86a95a9fc\") "
Oct 03 14:10:16 crc kubenswrapper[4861]: I1003 14:10:16.193225 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5a870822-9c29-4acb-b63c-2ff86a95a9fc-inventory\") pod \"5a870822-9c29-4acb-b63c-2ff86a95a9fc\" (UID: \"5a870822-9c29-4acb-b63c-2ff86a95a9fc\") "
Oct 03 14:10:16 crc kubenswrapper[4861]: I1003 14:10:16.193266 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5a870822-9c29-4acb-b63c-2ff86a95a9fc-ssh-key\") pod \"5a870822-9c29-4acb-b63c-2ff86a95a9fc\" (UID: \"5a870822-9c29-4acb-b63c-2ff86a95a9fc\") "
Oct 03 14:10:16 crc kubenswrapper[4861]: I1003 14:10:16.193433 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/5a870822-9c29-4acb-b63c-2ff86a95a9fc-nova-metadata-neutron-config-0\") pod \"5a870822-9c29-4acb-b63c-2ff86a95a9fc\" (UID: \"5a870822-9c29-4acb-b63c-2ff86a95a9fc\") "
Oct 03 14:10:16 crc kubenswrapper[4861]: I1003 14:10:16.198735 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5a870822-9c29-4acb-b63c-2ff86a95a9fc-kube-api-access-8dj9r" (OuterVolumeSpecName: "kube-api-access-8dj9r") pod "5a870822-9c29-4acb-b63c-2ff86a95a9fc" (UID: "5a870822-9c29-4acb-b63c-2ff86a95a9fc"). InnerVolumeSpecName "kube-api-access-8dj9r". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 14:10:16 crc kubenswrapper[4861]: I1003 14:10:16.199537 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5a870822-9c29-4acb-b63c-2ff86a95a9fc-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "5a870822-9c29-4acb-b63c-2ff86a95a9fc" (UID: "5a870822-9c29-4acb-b63c-2ff86a95a9fc"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 14:10:16 crc kubenswrapper[4861]: I1003 14:10:16.220282 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5a870822-9c29-4acb-b63c-2ff86a95a9fc-neutron-ovn-metadata-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-ovn-metadata-agent-neutron-config-0") pod "5a870822-9c29-4acb-b63c-2ff86a95a9fc" (UID: "5a870822-9c29-4acb-b63c-2ff86a95a9fc"). InnerVolumeSpecName "neutron-ovn-metadata-agent-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 14:10:16 crc kubenswrapper[4861]: I1003 14:10:16.229906 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5a870822-9c29-4acb-b63c-2ff86a95a9fc-inventory" (OuterVolumeSpecName: "inventory") pod "5a870822-9c29-4acb-b63c-2ff86a95a9fc" (UID: "5a870822-9c29-4acb-b63c-2ff86a95a9fc"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 14:10:16 crc kubenswrapper[4861]: I1003 14:10:16.231118 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5a870822-9c29-4acb-b63c-2ff86a95a9fc-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "5a870822-9c29-4acb-b63c-2ff86a95a9fc" (UID: "5a870822-9c29-4acb-b63c-2ff86a95a9fc"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 14:10:16 crc kubenswrapper[4861]: I1003 14:10:16.231735 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5a870822-9c29-4acb-b63c-2ff86a95a9fc-nova-metadata-neutron-config-0" (OuterVolumeSpecName: "nova-metadata-neutron-config-0") pod "5a870822-9c29-4acb-b63c-2ff86a95a9fc" (UID: "5a870822-9c29-4acb-b63c-2ff86a95a9fc"). InnerVolumeSpecName "nova-metadata-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue ""
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 14:10:16 crc kubenswrapper[4861]: I1003 14:10:16.296312 4861 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/5a870822-9c29-4acb-b63c-2ff86a95a9fc-nova-metadata-neutron-config-0\") on node \"crc\" DevicePath \"\"" Oct 03 14:10:16 crc kubenswrapper[4861]: I1003 14:10:16.296533 4861 reconciler_common.go:293] "Volume detached for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/5a870822-9c29-4acb-b63c-2ff86a95a9fc-neutron-ovn-metadata-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Oct 03 14:10:16 crc kubenswrapper[4861]: I1003 14:10:16.296624 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8dj9r\" (UniqueName: \"kubernetes.io/projected/5a870822-9c29-4acb-b63c-2ff86a95a9fc-kube-api-access-8dj9r\") on node \"crc\" DevicePath \"\"" Oct 03 14:10:16 crc kubenswrapper[4861]: I1003 14:10:16.296738 4861 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a870822-9c29-4acb-b63c-2ff86a95a9fc-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 14:10:16 crc kubenswrapper[4861]: I1003 14:10:16.296847 4861 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5a870822-9c29-4acb-b63c-2ff86a95a9fc-inventory\") on node \"crc\" DevicePath \"\"" Oct 03 14:10:16 crc kubenswrapper[4861]: I1003 14:10:16.296930 4861 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5a870822-9c29-4acb-b63c-2ff86a95a9fc-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 03 14:10:16 crc kubenswrapper[4861]: I1003 14:10:16.633463 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-8zld2" event={"ID":"5a870822-9c29-4acb-b63c-2ff86a95a9fc","Type":"ContainerDied","Data":"9a516491d3ca310edea77ca0d4970a43473188b6c035eeab5cdc5beefdb906d3"} Oct 03 14:10:16 crc kubenswrapper[4861]: I1003 14:10:16.633823 4861 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9a516491d3ca310edea77ca0d4970a43473188b6c035eeab5cdc5beefdb906d3" Oct 03 14:10:16 crc kubenswrapper[4861]: I1003 14:10:16.633520 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-8zld2" Oct 03 14:10:16 crc kubenswrapper[4861]: I1003 14:10:16.744357 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-6z5r9"] Oct 03 14:10:16 crc kubenswrapper[4861]: E1003 14:10:16.744798 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7413195b-db31-4d9f-b262-bb9d86526727" containerName="extract-utilities" Oct 03 14:10:16 crc kubenswrapper[4861]: I1003 14:10:16.744822 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="7413195b-db31-4d9f-b262-bb9d86526727" containerName="extract-utilities" Oct 03 14:10:16 crc kubenswrapper[4861]: E1003 14:10:16.744844 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7413195b-db31-4d9f-b262-bb9d86526727" containerName="extract-content" Oct 03 14:10:16 crc kubenswrapper[4861]: I1003 14:10:16.744853 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="7413195b-db31-4d9f-b262-bb9d86526727" containerName="extract-content" Oct 03 14:10:16 crc kubenswrapper[4861]: E1003 14:10:16.744875 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7413195b-db31-4d9f-b262-bb9d86526727" containerName="registry-server" Oct 03 14:10:16 crc kubenswrapper[4861]: I1003 14:10:16.744884 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="7413195b-db31-4d9f-b262-bb9d86526727" containerName="registry-server" Oct 03 14:10:16 crc kubenswrapper[4861]: E1003 14:10:16.744909 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5a870822-9c29-4acb-b63c-2ff86a95a9fc" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Oct 03 14:10:16 crc kubenswrapper[4861]: I1003 14:10:16.744918 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="5a870822-9c29-4acb-b63c-2ff86a95a9fc" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Oct 03 14:10:16 crc kubenswrapper[4861]: I1003 14:10:16.745137 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="5a870822-9c29-4acb-b63c-2ff86a95a9fc" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Oct 03 14:10:16 crc kubenswrapper[4861]: I1003 14:10:16.745170 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="7413195b-db31-4d9f-b262-bb9d86526727" containerName="registry-server" Oct 03 14:10:16 crc kubenswrapper[4861]: I1003 14:10:16.745898 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-6z5r9" Oct 03 14:10:16 crc kubenswrapper[4861]: I1003 14:10:16.748620 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 03 14:10:16 crc kubenswrapper[4861]: I1003 14:10:16.749321 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"libvirt-secret" Oct 03 14:10:16 crc kubenswrapper[4861]: I1003 14:10:16.750218 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 03 14:10:16 crc kubenswrapper[4861]: I1003 14:10:16.750475 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 03 14:10:16 crc kubenswrapper[4861]: I1003 14:10:16.750597 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-bd7xg" Oct 03 14:10:16 crc kubenswrapper[4861]: I1003 14:10:16.766351 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-6z5r9"] Oct 03 14:10:16 crc kubenswrapper[4861]: I1003 14:10:16.907398 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fd4c1aae-4d8b-49be-ad63-d6531b244f73-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-6z5r9\" (UID: \"fd4c1aae-4d8b-49be-ad63-d6531b244f73\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-6z5r9" Oct 03 14:10:16 crc kubenswrapper[4861]: I1003 14:10:16.907717 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/fd4c1aae-4d8b-49be-ad63-d6531b244f73-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-6z5r9\" (UID: \"fd4c1aae-4d8b-49be-ad63-d6531b244f73\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-6z5r9" Oct 03 14:10:16 crc kubenswrapper[4861]: I1003 14:10:16.907887 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd4c1aae-4d8b-49be-ad63-d6531b244f73-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-6z5r9\" (UID: \"fd4c1aae-4d8b-49be-ad63-d6531b244f73\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-6z5r9" Oct 03 14:10:16 crc kubenswrapper[4861]: I1003 14:10:16.908151 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xmjd9\" (UniqueName: \"kubernetes.io/projected/fd4c1aae-4d8b-49be-ad63-d6531b244f73-kube-api-access-xmjd9\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-6z5r9\" (UID: \"fd4c1aae-4d8b-49be-ad63-d6531b244f73\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-6z5r9" Oct 03 14:10:16 crc kubenswrapper[4861]: I1003 14:10:16.908338 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fd4c1aae-4d8b-49be-ad63-d6531b244f73-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-6z5r9\" (UID: \"fd4c1aae-4d8b-49be-ad63-d6531b244f73\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-6z5r9" Oct 03 14:10:17 crc kubenswrapper[4861]: I1003 14:10:17.009880 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-xmjd9\" (UniqueName: \"kubernetes.io/projected/fd4c1aae-4d8b-49be-ad63-d6531b244f73-kube-api-access-xmjd9\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-6z5r9\" (UID: \"fd4c1aae-4d8b-49be-ad63-d6531b244f73\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-6z5r9" Oct 03 14:10:17 crc kubenswrapper[4861]: I1003 14:10:17.009951 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fd4c1aae-4d8b-49be-ad63-d6531b244f73-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-6z5r9\" (UID: \"fd4c1aae-4d8b-49be-ad63-d6531b244f73\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-6z5r9" Oct 03 14:10:17 crc kubenswrapper[4861]: I1003 14:10:17.009987 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fd4c1aae-4d8b-49be-ad63-d6531b244f73-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-6z5r9\" (UID: \"fd4c1aae-4d8b-49be-ad63-d6531b244f73\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-6z5r9" Oct 03 14:10:17 crc kubenswrapper[4861]: I1003 14:10:17.010022 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/fd4c1aae-4d8b-49be-ad63-d6531b244f73-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-6z5r9\" (UID: \"fd4c1aae-4d8b-49be-ad63-d6531b244f73\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-6z5r9" Oct 03 14:10:17 crc kubenswrapper[4861]: I1003 14:10:17.010084 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd4c1aae-4d8b-49be-ad63-d6531b244f73-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-6z5r9\" (UID: \"fd4c1aae-4d8b-49be-ad63-d6531b244f73\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-6z5r9" Oct 03 14:10:17 crc kubenswrapper[4861]: I1003 14:10:17.017799 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/fd4c1aae-4d8b-49be-ad63-d6531b244f73-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-6z5r9\" (UID: \"fd4c1aae-4d8b-49be-ad63-d6531b244f73\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-6z5r9" Oct 03 14:10:17 crc kubenswrapper[4861]: I1003 14:10:17.017893 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fd4c1aae-4d8b-49be-ad63-d6531b244f73-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-6z5r9\" (UID: \"fd4c1aae-4d8b-49be-ad63-d6531b244f73\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-6z5r9" Oct 03 14:10:17 crc kubenswrapper[4861]: I1003 14:10:17.018125 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd4c1aae-4d8b-49be-ad63-d6531b244f73-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-6z5r9\" (UID: \"fd4c1aae-4d8b-49be-ad63-d6531b244f73\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-6z5r9" Oct 03 14:10:17 crc kubenswrapper[4861]: I1003 14:10:17.018367 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fd4c1aae-4d8b-49be-ad63-d6531b244f73-inventory\") pod 
\"libvirt-edpm-deployment-openstack-edpm-ipam-6z5r9\" (UID: \"fd4c1aae-4d8b-49be-ad63-d6531b244f73\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-6z5r9" Oct 03 14:10:17 crc kubenswrapper[4861]: I1003 14:10:17.028440 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xmjd9\" (UniqueName: \"kubernetes.io/projected/fd4c1aae-4d8b-49be-ad63-d6531b244f73-kube-api-access-xmjd9\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-6z5r9\" (UID: \"fd4c1aae-4d8b-49be-ad63-d6531b244f73\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-6z5r9" Oct 03 14:10:17 crc kubenswrapper[4861]: I1003 14:10:17.063196 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-6z5r9" Oct 03 14:10:17 crc kubenswrapper[4861]: I1003 14:10:17.564278 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-6z5r9"] Oct 03 14:10:17 crc kubenswrapper[4861]: I1003 14:10:17.642084 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-6z5r9" event={"ID":"fd4c1aae-4d8b-49be-ad63-d6531b244f73","Type":"ContainerStarted","Data":"65643493c3b3926e1dfef7fdf81966c1fdedcc5622490a3659e35c8077b45812"} Oct 03 14:10:18 crc kubenswrapper[4861]: I1003 14:10:18.650535 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-6z5r9" event={"ID":"fd4c1aae-4d8b-49be-ad63-d6531b244f73","Type":"ContainerStarted","Data":"8a6622453faec02df412108a888fd562fb4c86fe334335f7c366b8bd10df05ca"} Oct 03 14:10:18 crc kubenswrapper[4861]: I1003 14:10:18.677690 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-6z5r9" podStartSLOduration=2.529845321 podStartE2EDuration="2.67764316s" podCreationTimestamp="2025-10-03 14:10:16 +0000 UTC" firstStartedPulling="2025-10-03 14:10:17.565462735 +0000 UTC m=+2331.563447782" lastFinishedPulling="2025-10-03 14:10:17.713260574 +0000 UTC m=+2331.711245621" observedRunningTime="2025-10-03 14:10:18.664354449 +0000 UTC m=+2332.662339516" watchObservedRunningTime="2025-10-03 14:10:18.67764316 +0000 UTC m=+2332.675628217" Oct 03 14:10:27 crc kubenswrapper[4861]: I1003 14:10:27.680926 4861 scope.go:117] "RemoveContainer" containerID="1cdb234382b9308b9a1f3635af42d3c4e786f277d88eba9e4b15d46ab010519c" Oct 03 14:10:27 crc kubenswrapper[4861]: E1003 14:10:27.681670 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 14:10:41 crc kubenswrapper[4861]: I1003 14:10:41.681714 4861 scope.go:117] "RemoveContainer" containerID="1cdb234382b9308b9a1f3635af42d3c4e786f277d88eba9e4b15d46ab010519c" Oct 03 14:10:41 crc kubenswrapper[4861]: E1003 14:10:41.682467 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 14:10:54 crc kubenswrapper[4861]: I1003 14:10:54.681355 4861 scope.go:117] "RemoveContainer" containerID="1cdb234382b9308b9a1f3635af42d3c4e786f277d88eba9e4b15d46ab010519c" Oct 03 14:10:54 crc kubenswrapper[4861]: E1003 14:10:54.682279 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 14:11:06 crc kubenswrapper[4861]: I1003 14:11:06.697416 4861 scope.go:117] "RemoveContainer" containerID="1cdb234382b9308b9a1f3635af42d3c4e786f277d88eba9e4b15d46ab010519c" Oct 03 14:11:06 crc kubenswrapper[4861]: E1003 14:11:06.698385 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 14:11:17 crc kubenswrapper[4861]: I1003 14:11:17.682178 4861 scope.go:117] "RemoveContainer" containerID="1cdb234382b9308b9a1f3635af42d3c4e786f277d88eba9e4b15d46ab010519c" Oct 03 14:11:17 crc kubenswrapper[4861]: E1003 14:11:17.682977 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 14:11:28 crc kubenswrapper[4861]: I1003 14:11:28.681075 4861 scope.go:117] "RemoveContainer" containerID="1cdb234382b9308b9a1f3635af42d3c4e786f277d88eba9e4b15d46ab010519c" Oct 03 14:11:28 crc kubenswrapper[4861]: E1003 14:11:28.681793 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 14:11:40 crc kubenswrapper[4861]: I1003 14:11:40.682149 4861 scope.go:117] "RemoveContainer" containerID="1cdb234382b9308b9a1f3635af42d3c4e786f277d88eba9e4b15d46ab010519c" Oct 03 14:11:40 crc kubenswrapper[4861]: E1003 14:11:40.683002 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 14:11:51 crc kubenswrapper[4861]: I1003 14:11:51.681711 4861 
scope.go:117] "RemoveContainer" containerID="1cdb234382b9308b9a1f3635af42d3c4e786f277d88eba9e4b15d46ab010519c" Oct 03 14:11:51 crc kubenswrapper[4861]: E1003 14:11:51.682658 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 14:12:04 crc kubenswrapper[4861]: I1003 14:12:04.682143 4861 scope.go:117] "RemoveContainer" containerID="1cdb234382b9308b9a1f3635af42d3c4e786f277d88eba9e4b15d46ab010519c" Oct 03 14:12:04 crc kubenswrapper[4861]: E1003 14:12:04.682815 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 14:12:19 crc kubenswrapper[4861]: I1003 14:12:19.681206 4861 scope.go:117] "RemoveContainer" containerID="1cdb234382b9308b9a1f3635af42d3c4e786f277d88eba9e4b15d46ab010519c" Oct 03 14:12:19 crc kubenswrapper[4861]: E1003 14:12:19.682179 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 14:12:33 crc kubenswrapper[4861]: I1003 14:12:33.681216 4861 scope.go:117] "RemoveContainer" containerID="1cdb234382b9308b9a1f3635af42d3c4e786f277d88eba9e4b15d46ab010519c" Oct 03 14:12:33 crc kubenswrapper[4861]: E1003 14:12:33.682002 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 14:12:44 crc kubenswrapper[4861]: I1003 14:12:44.681634 4861 scope.go:117] "RemoveContainer" containerID="1cdb234382b9308b9a1f3635af42d3c4e786f277d88eba9e4b15d46ab010519c" Oct 03 14:12:44 crc kubenswrapper[4861]: E1003 14:12:44.686546 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 14:12:59 crc kubenswrapper[4861]: I1003 14:12:59.681144 4861 scope.go:117] "RemoveContainer" containerID="1cdb234382b9308b9a1f3635af42d3c4e786f277d88eba9e4b15d46ab010519c" Oct 03 14:12:59 crc kubenswrapper[4861]: E1003 14:12:59.681811 4861 
Oct 03 14:12:59 crc kubenswrapper[4861]: E1003 14:12:59.681811 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348"
Oct 03 14:13:10 crc kubenswrapper[4861]: I1003 14:13:10.681172 4861 scope.go:117] "RemoveContainer" containerID="1cdb234382b9308b9a1f3635af42d3c4e786f277d88eba9e4b15d46ab010519c"
Oct 03 14:13:11 crc kubenswrapper[4861]: I1003 14:13:11.299842 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" event={"ID":"d8335d3f-417e-4114-b306-a3d8f6c31348","Type":"ContainerStarted","Data":"05fad3e765859f2b4ff1b56618745f3a7977daa63f6f944dae7adb4c2a767290"}
Oct 03 14:14:59 crc kubenswrapper[4861]: I1003 14:14:59.318493 4861 generic.go:334] "Generic (PLEG): container finished" podID="fd4c1aae-4d8b-49be-ad63-d6531b244f73" containerID="8a6622453faec02df412108a888fd562fb4c86fe334335f7c366b8bd10df05ca" exitCode=0
Oct 03 14:14:59 crc kubenswrapper[4861]: I1003 14:14:59.318557 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-6z5r9" event={"ID":"fd4c1aae-4d8b-49be-ad63-d6531b244f73","Type":"ContainerDied","Data":"8a6622453faec02df412108a888fd562fb4c86fe334335f7c366b8bd10df05ca"}
Oct 03 14:15:00 crc kubenswrapper[4861]: I1003 14:15:00.143749 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29325015-5cw5b"]
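collect-profiles-29325015-5cw5b is a Job pod stamped out by OLM's collect-profiles CronJob, and the numeric suffix follows the CronJob controller's naming convention: the scheduled run time expressed in minutes since the Unix epoch. A quick decode (the convention is the assumption here; the arithmetic checks out against this pod's 14:15:00 creation):

package main

import (
	"fmt"
	"time"
)

func main() {
	// CronJob children are named <cronjob>-<scheduled time in minutes since epoch>.
	fmt.Println(time.Unix(29325015*60, 0).UTC()) // 2025-10-03 14:15:00 +0000 UTC
	fmt.Println(time.Unix(29324970*60, 0).UTC()) // 2025-10-03 13:30:00 +0000 UTC
}

The second value belongs to collect-profiles-29324970-q4nf6, deleted a few seconds later in this log: 45 minutes (three 15-minute schedules) older, presumably pruned under the CronJob's job history limit.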
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29325015-5cw5b" Oct 03 14:15:00 crc kubenswrapper[4861]: I1003 14:15:00.152381 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 03 14:15:00 crc kubenswrapper[4861]: I1003 14:15:00.152429 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 03 14:15:00 crc kubenswrapper[4861]: I1003 14:15:00.156738 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29325015-5cw5b"] Oct 03 14:15:00 crc kubenswrapper[4861]: I1003 14:15:00.277916 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x47jm\" (UniqueName: \"kubernetes.io/projected/58941f0d-ab00-4963-8608-dbbe6047db92-kube-api-access-x47jm\") pod \"collect-profiles-29325015-5cw5b\" (UID: \"58941f0d-ab00-4963-8608-dbbe6047db92\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325015-5cw5b" Oct 03 14:15:00 crc kubenswrapper[4861]: I1003 14:15:00.278739 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/58941f0d-ab00-4963-8608-dbbe6047db92-secret-volume\") pod \"collect-profiles-29325015-5cw5b\" (UID: \"58941f0d-ab00-4963-8608-dbbe6047db92\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325015-5cw5b" Oct 03 14:15:00 crc kubenswrapper[4861]: I1003 14:15:00.278968 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/58941f0d-ab00-4963-8608-dbbe6047db92-config-volume\") pod \"collect-profiles-29325015-5cw5b\" (UID: \"58941f0d-ab00-4963-8608-dbbe6047db92\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325015-5cw5b" Oct 03 14:15:00 crc kubenswrapper[4861]: I1003 14:15:00.381130 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/58941f0d-ab00-4963-8608-dbbe6047db92-secret-volume\") pod \"collect-profiles-29325015-5cw5b\" (UID: \"58941f0d-ab00-4963-8608-dbbe6047db92\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325015-5cw5b" Oct 03 14:15:00 crc kubenswrapper[4861]: I1003 14:15:00.381290 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/58941f0d-ab00-4963-8608-dbbe6047db92-config-volume\") pod \"collect-profiles-29325015-5cw5b\" (UID: \"58941f0d-ab00-4963-8608-dbbe6047db92\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325015-5cw5b" Oct 03 14:15:00 crc kubenswrapper[4861]: I1003 14:15:00.381367 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x47jm\" (UniqueName: \"kubernetes.io/projected/58941f0d-ab00-4963-8608-dbbe6047db92-kube-api-access-x47jm\") pod \"collect-profiles-29325015-5cw5b\" (UID: \"58941f0d-ab00-4963-8608-dbbe6047db92\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325015-5cw5b" Oct 03 14:15:00 crc kubenswrapper[4861]: I1003 14:15:00.384170 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/58941f0d-ab00-4963-8608-dbbe6047db92-config-volume\") pod 
\"collect-profiles-29325015-5cw5b\" (UID: \"58941f0d-ab00-4963-8608-dbbe6047db92\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325015-5cw5b" Oct 03 14:15:00 crc kubenswrapper[4861]: I1003 14:15:00.392798 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/58941f0d-ab00-4963-8608-dbbe6047db92-secret-volume\") pod \"collect-profiles-29325015-5cw5b\" (UID: \"58941f0d-ab00-4963-8608-dbbe6047db92\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325015-5cw5b" Oct 03 14:15:00 crc kubenswrapper[4861]: I1003 14:15:00.404830 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x47jm\" (UniqueName: \"kubernetes.io/projected/58941f0d-ab00-4963-8608-dbbe6047db92-kube-api-access-x47jm\") pod \"collect-profiles-29325015-5cw5b\" (UID: \"58941f0d-ab00-4963-8608-dbbe6047db92\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325015-5cw5b" Oct 03 14:15:00 crc kubenswrapper[4861]: I1003 14:15:00.465804 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29325015-5cw5b" Oct 03 14:15:00 crc kubenswrapper[4861]: I1003 14:15:00.881863 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-6z5r9" Oct 03 14:15:00 crc kubenswrapper[4861]: I1003 14:15:00.894166 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xmjd9\" (UniqueName: \"kubernetes.io/projected/fd4c1aae-4d8b-49be-ad63-d6531b244f73-kube-api-access-xmjd9\") pod \"fd4c1aae-4d8b-49be-ad63-d6531b244f73\" (UID: \"fd4c1aae-4d8b-49be-ad63-d6531b244f73\") " Oct 03 14:15:00 crc kubenswrapper[4861]: I1003 14:15:00.894201 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd4c1aae-4d8b-49be-ad63-d6531b244f73-libvirt-combined-ca-bundle\") pod \"fd4c1aae-4d8b-49be-ad63-d6531b244f73\" (UID: \"fd4c1aae-4d8b-49be-ad63-d6531b244f73\") " Oct 03 14:15:00 crc kubenswrapper[4861]: I1003 14:15:00.894386 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fd4c1aae-4d8b-49be-ad63-d6531b244f73-inventory\") pod \"fd4c1aae-4d8b-49be-ad63-d6531b244f73\" (UID: \"fd4c1aae-4d8b-49be-ad63-d6531b244f73\") " Oct 03 14:15:00 crc kubenswrapper[4861]: I1003 14:15:00.894433 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fd4c1aae-4d8b-49be-ad63-d6531b244f73-ssh-key\") pod \"fd4c1aae-4d8b-49be-ad63-d6531b244f73\" (UID: \"fd4c1aae-4d8b-49be-ad63-d6531b244f73\") " Oct 03 14:15:00 crc kubenswrapper[4861]: I1003 14:15:00.894474 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/fd4c1aae-4d8b-49be-ad63-d6531b244f73-libvirt-secret-0\") pod \"fd4c1aae-4d8b-49be-ad63-d6531b244f73\" (UID: \"fd4c1aae-4d8b-49be-ad63-d6531b244f73\") " Oct 03 14:15:00 crc kubenswrapper[4861]: I1003 14:15:00.906760 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fd4c1aae-4d8b-49be-ad63-d6531b244f73-kube-api-access-xmjd9" (OuterVolumeSpecName: "kube-api-access-xmjd9") pod "fd4c1aae-4d8b-49be-ad63-d6531b244f73" (UID: 
"fd4c1aae-4d8b-49be-ad63-d6531b244f73"). InnerVolumeSpecName "kube-api-access-xmjd9". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 14:15:00 crc kubenswrapper[4861]: I1003 14:15:00.908183 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fd4c1aae-4d8b-49be-ad63-d6531b244f73-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "fd4c1aae-4d8b-49be-ad63-d6531b244f73" (UID: "fd4c1aae-4d8b-49be-ad63-d6531b244f73"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 14:15:00 crc kubenswrapper[4861]: I1003 14:15:00.931851 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fd4c1aae-4d8b-49be-ad63-d6531b244f73-inventory" (OuterVolumeSpecName: "inventory") pod "fd4c1aae-4d8b-49be-ad63-d6531b244f73" (UID: "fd4c1aae-4d8b-49be-ad63-d6531b244f73"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 14:15:00 crc kubenswrapper[4861]: I1003 14:15:00.952686 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fd4c1aae-4d8b-49be-ad63-d6531b244f73-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "fd4c1aae-4d8b-49be-ad63-d6531b244f73" (UID: "fd4c1aae-4d8b-49be-ad63-d6531b244f73"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 14:15:00 crc kubenswrapper[4861]: I1003 14:15:00.958340 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fd4c1aae-4d8b-49be-ad63-d6531b244f73-libvirt-secret-0" (OuterVolumeSpecName: "libvirt-secret-0") pod "fd4c1aae-4d8b-49be-ad63-d6531b244f73" (UID: "fd4c1aae-4d8b-49be-ad63-d6531b244f73"). InnerVolumeSpecName "libvirt-secret-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 14:15:00 crc kubenswrapper[4861]: I1003 14:15:00.995809 4861 reconciler_common.go:293] "Volume detached for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/fd4c1aae-4d8b-49be-ad63-d6531b244f73-libvirt-secret-0\") on node \"crc\" DevicePath \"\"" Oct 03 14:15:00 crc kubenswrapper[4861]: I1003 14:15:00.995841 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xmjd9\" (UniqueName: \"kubernetes.io/projected/fd4c1aae-4d8b-49be-ad63-d6531b244f73-kube-api-access-xmjd9\") on node \"crc\" DevicePath \"\"" Oct 03 14:15:00 crc kubenswrapper[4861]: I1003 14:15:00.995853 4861 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd4c1aae-4d8b-49be-ad63-d6531b244f73-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 14:15:00 crc kubenswrapper[4861]: I1003 14:15:00.995865 4861 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fd4c1aae-4d8b-49be-ad63-d6531b244f73-inventory\") on node \"crc\" DevicePath \"\"" Oct 03 14:15:00 crc kubenswrapper[4861]: I1003 14:15:00.995873 4861 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fd4c1aae-4d8b-49be-ad63-d6531b244f73-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 03 14:15:01 crc kubenswrapper[4861]: I1003 14:15:01.047026 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29325015-5cw5b"] Oct 03 14:15:01 crc kubenswrapper[4861]: I1003 14:15:01.356578 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29325015-5cw5b" event={"ID":"58941f0d-ab00-4963-8608-dbbe6047db92","Type":"ContainerStarted","Data":"6b6dc1c10903517e9787d219600d2def89facee0907c90bcf4a7524ee6ccabce"} Oct 03 14:15:01 crc kubenswrapper[4861]: I1003 14:15:01.359249 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-6z5r9" event={"ID":"fd4c1aae-4d8b-49be-ad63-d6531b244f73","Type":"ContainerDied","Data":"65643493c3b3926e1dfef7fdf81966c1fdedcc5622490a3659e35c8077b45812"} Oct 03 14:15:01 crc kubenswrapper[4861]: I1003 14:15:01.359286 4861 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="65643493c3b3926e1dfef7fdf81966c1fdedcc5622490a3659e35c8077b45812" Oct 03 14:15:01 crc kubenswrapper[4861]: I1003 14:15:01.359350 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-6z5r9" Oct 03 14:15:01 crc kubenswrapper[4861]: E1003 14:15:01.541570 4861 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfd4c1aae_4d8b_49be_ad63_d6531b244f73.slice/crio-65643493c3b3926e1dfef7fdf81966c1fdedcc5622490a3659e35c8077b45812\": RecentStats: unable to find data in memory cache]" Oct 03 14:15:01 crc kubenswrapper[4861]: I1003 14:15:01.547475 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-mlf7m"] Oct 03 14:15:01 crc kubenswrapper[4861]: E1003 14:15:01.548200 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd4c1aae-4d8b-49be-ad63-d6531b244f73" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Oct 03 14:15:01 crc kubenswrapper[4861]: I1003 14:15:01.548254 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd4c1aae-4d8b-49be-ad63-d6531b244f73" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Oct 03 14:15:01 crc kubenswrapper[4861]: I1003 14:15:01.548492 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="fd4c1aae-4d8b-49be-ad63-d6531b244f73" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Oct 03 14:15:01 crc kubenswrapper[4861]: I1003 14:15:01.549429 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-mlf7m" Oct 03 14:15:01 crc kubenswrapper[4861]: I1003 14:15:01.554797 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"nova-extra-config" Oct 03 14:15:01 crc kubenswrapper[4861]: I1003 14:15:01.555394 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-migration-ssh-key" Oct 03 14:15:01 crc kubenswrapper[4861]: I1003 14:15:01.555713 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 03 14:15:01 crc kubenswrapper[4861]: I1003 14:15:01.555978 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-bd7xg" Oct 03 14:15:01 crc kubenswrapper[4861]: I1003 14:15:01.556341 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 03 14:15:01 crc kubenswrapper[4861]: I1003 14:15:01.556415 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 03 14:15:01 crc kubenswrapper[4861]: I1003 14:15:01.560710 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-compute-config" Oct 03 14:15:01 crc kubenswrapper[4861]: I1003 14:15:01.574331 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-mlf7m"] Oct 03 14:15:01 crc kubenswrapper[4861]: I1003 14:15:01.613343 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/bc64fc8f-fe84-485b-8d52-a4e26a00435a-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-mlf7m\" (UID: \"bc64fc8f-fe84-485b-8d52-a4e26a00435a\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-mlf7m" Oct 03 14:15:01 crc kubenswrapper[4861]: I1003 14:15:01.613781 4861 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/bc64fc8f-fe84-485b-8d52-a4e26a00435a-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-mlf7m\" (UID: \"bc64fc8f-fe84-485b-8d52-a4e26a00435a\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-mlf7m" Oct 03 14:15:01 crc kubenswrapper[4861]: I1003 14:15:01.613950 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/bc64fc8f-fe84-485b-8d52-a4e26a00435a-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-mlf7m\" (UID: \"bc64fc8f-fe84-485b-8d52-a4e26a00435a\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-mlf7m" Oct 03 14:15:01 crc kubenswrapper[4861]: I1003 14:15:01.614048 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bc64fc8f-fe84-485b-8d52-a4e26a00435a-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-mlf7m\" (UID: \"bc64fc8f-fe84-485b-8d52-a4e26a00435a\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-mlf7m" Oct 03 14:15:01 crc kubenswrapper[4861]: I1003 14:15:01.614121 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/bc64fc8f-fe84-485b-8d52-a4e26a00435a-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-mlf7m\" (UID: \"bc64fc8f-fe84-485b-8d52-a4e26a00435a\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-mlf7m" Oct 03 14:15:01 crc kubenswrapper[4861]: I1003 14:15:01.614217 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/bc64fc8f-fe84-485b-8d52-a4e26a00435a-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-mlf7m\" (UID: \"bc64fc8f-fe84-485b-8d52-a4e26a00435a\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-mlf7m" Oct 03 14:15:01 crc kubenswrapper[4861]: I1003 14:15:01.614412 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/bc64fc8f-fe84-485b-8d52-a4e26a00435a-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-mlf7m\" (UID: \"bc64fc8f-fe84-485b-8d52-a4e26a00435a\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-mlf7m" Oct 03 14:15:01 crc kubenswrapper[4861]: I1003 14:15:01.614508 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/bc64fc8f-fe84-485b-8d52-a4e26a00435a-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-mlf7m\" (UID: \"bc64fc8f-fe84-485b-8d52-a4e26a00435a\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-mlf7m" Oct 03 14:15:01 crc kubenswrapper[4861]: I1003 14:15:01.614587 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5njdp\" (UniqueName: \"kubernetes.io/projected/bc64fc8f-fe84-485b-8d52-a4e26a00435a-kube-api-access-5njdp\") pod \"nova-edpm-deployment-openstack-edpm-ipam-mlf7m\" (UID: \"bc64fc8f-fe84-485b-8d52-a4e26a00435a\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-mlf7m" Oct 03 14:15:01 crc 
kubenswrapper[4861]: I1003 14:15:01.715961 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/bc64fc8f-fe84-485b-8d52-a4e26a00435a-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-mlf7m\" (UID: \"bc64fc8f-fe84-485b-8d52-a4e26a00435a\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-mlf7m" Oct 03 14:15:01 crc kubenswrapper[4861]: I1003 14:15:01.716008 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/bc64fc8f-fe84-485b-8d52-a4e26a00435a-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-mlf7m\" (UID: \"bc64fc8f-fe84-485b-8d52-a4e26a00435a\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-mlf7m" Oct 03 14:15:01 crc kubenswrapper[4861]: I1003 14:15:01.716036 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bc64fc8f-fe84-485b-8d52-a4e26a00435a-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-mlf7m\" (UID: \"bc64fc8f-fe84-485b-8d52-a4e26a00435a\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-mlf7m" Oct 03 14:15:01 crc kubenswrapper[4861]: I1003 14:15:01.716067 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/bc64fc8f-fe84-485b-8d52-a4e26a00435a-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-mlf7m\" (UID: \"bc64fc8f-fe84-485b-8d52-a4e26a00435a\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-mlf7m" Oct 03 14:15:01 crc kubenswrapper[4861]: I1003 14:15:01.716100 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/bc64fc8f-fe84-485b-8d52-a4e26a00435a-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-mlf7m\" (UID: \"bc64fc8f-fe84-485b-8d52-a4e26a00435a\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-mlf7m" Oct 03 14:15:01 crc kubenswrapper[4861]: I1003 14:15:01.716184 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/bc64fc8f-fe84-485b-8d52-a4e26a00435a-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-mlf7m\" (UID: \"bc64fc8f-fe84-485b-8d52-a4e26a00435a\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-mlf7m" Oct 03 14:15:01 crc kubenswrapper[4861]: I1003 14:15:01.716212 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/bc64fc8f-fe84-485b-8d52-a4e26a00435a-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-mlf7m\" (UID: \"bc64fc8f-fe84-485b-8d52-a4e26a00435a\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-mlf7m" Oct 03 14:15:01 crc kubenswrapper[4861]: I1003 14:15:01.716257 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5njdp\" (UniqueName: \"kubernetes.io/projected/bc64fc8f-fe84-485b-8d52-a4e26a00435a-kube-api-access-5njdp\") pod \"nova-edpm-deployment-openstack-edpm-ipam-mlf7m\" (UID: \"bc64fc8f-fe84-485b-8d52-a4e26a00435a\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-mlf7m" Oct 03 14:15:01 crc kubenswrapper[4861]: I1003 14:15:01.716283 4861 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/bc64fc8f-fe84-485b-8d52-a4e26a00435a-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-mlf7m\" (UID: \"bc64fc8f-fe84-485b-8d52-a4e26a00435a\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-mlf7m" Oct 03 14:15:01 crc kubenswrapper[4861]: I1003 14:15:01.717755 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/bc64fc8f-fe84-485b-8d52-a4e26a00435a-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-mlf7m\" (UID: \"bc64fc8f-fe84-485b-8d52-a4e26a00435a\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-mlf7m" Oct 03 14:15:01 crc kubenswrapper[4861]: I1003 14:15:01.721434 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/bc64fc8f-fe84-485b-8d52-a4e26a00435a-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-mlf7m\" (UID: \"bc64fc8f-fe84-485b-8d52-a4e26a00435a\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-mlf7m" Oct 03 14:15:01 crc kubenswrapper[4861]: I1003 14:15:01.722283 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/bc64fc8f-fe84-485b-8d52-a4e26a00435a-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-mlf7m\" (UID: \"bc64fc8f-fe84-485b-8d52-a4e26a00435a\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-mlf7m" Oct 03 14:15:01 crc kubenswrapper[4861]: I1003 14:15:01.722312 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bc64fc8f-fe84-485b-8d52-a4e26a00435a-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-mlf7m\" (UID: \"bc64fc8f-fe84-485b-8d52-a4e26a00435a\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-mlf7m" Oct 03 14:15:01 crc kubenswrapper[4861]: I1003 14:15:01.722766 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/bc64fc8f-fe84-485b-8d52-a4e26a00435a-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-mlf7m\" (UID: \"bc64fc8f-fe84-485b-8d52-a4e26a00435a\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-mlf7m" Oct 03 14:15:01 crc kubenswrapper[4861]: I1003 14:15:01.723508 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/bc64fc8f-fe84-485b-8d52-a4e26a00435a-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-mlf7m\" (UID: \"bc64fc8f-fe84-485b-8d52-a4e26a00435a\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-mlf7m" Oct 03 14:15:01 crc kubenswrapper[4861]: I1003 14:15:01.731485 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/bc64fc8f-fe84-485b-8d52-a4e26a00435a-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-mlf7m\" (UID: \"bc64fc8f-fe84-485b-8d52-a4e26a00435a\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-mlf7m" Oct 03 14:15:01 crc kubenswrapper[4861]: I1003 14:15:01.733312 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-0\" (UniqueName: 
\"kubernetes.io/secret/bc64fc8f-fe84-485b-8d52-a4e26a00435a-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-mlf7m\" (UID: \"bc64fc8f-fe84-485b-8d52-a4e26a00435a\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-mlf7m" Oct 03 14:15:01 crc kubenswrapper[4861]: I1003 14:15:01.736029 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5njdp\" (UniqueName: \"kubernetes.io/projected/bc64fc8f-fe84-485b-8d52-a4e26a00435a-kube-api-access-5njdp\") pod \"nova-edpm-deployment-openstack-edpm-ipam-mlf7m\" (UID: \"bc64fc8f-fe84-485b-8d52-a4e26a00435a\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-mlf7m" Oct 03 14:15:01 crc kubenswrapper[4861]: I1003 14:15:01.896854 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-mlf7m" Oct 03 14:15:02 crc kubenswrapper[4861]: I1003 14:15:02.368632 4861 generic.go:334] "Generic (PLEG): container finished" podID="58941f0d-ab00-4963-8608-dbbe6047db92" containerID="a084be5b16415ec0457f508cbb3de9d68399fe163dfd909171e3d8b1bec10e99" exitCode=0 Oct 03 14:15:02 crc kubenswrapper[4861]: I1003 14:15:02.368727 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29325015-5cw5b" event={"ID":"58941f0d-ab00-4963-8608-dbbe6047db92","Type":"ContainerDied","Data":"a084be5b16415ec0457f508cbb3de9d68399fe163dfd909171e3d8b1bec10e99"} Oct 03 14:15:02 crc kubenswrapper[4861]: I1003 14:15:02.484903 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-mlf7m"] Oct 03 14:15:02 crc kubenswrapper[4861]: I1003 14:15:02.488627 4861 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 03 14:15:03 crc kubenswrapper[4861]: I1003 14:15:03.377623 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-mlf7m" event={"ID":"bc64fc8f-fe84-485b-8d52-a4e26a00435a","Type":"ContainerStarted","Data":"bf8da3f186490ef59ef68bc1a0ab8dbb596955434513485978fdf799fd829679"} Oct 03 14:15:03 crc kubenswrapper[4861]: I1003 14:15:03.377919 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-mlf7m" event={"ID":"bc64fc8f-fe84-485b-8d52-a4e26a00435a","Type":"ContainerStarted","Data":"529cf14552a3b35c771a68933ccbdf65d8fbcf083c1df4fe70285d91c024e9d6"} Oct 03 14:15:03 crc kubenswrapper[4861]: I1003 14:15:03.403935 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-mlf7m" podStartSLOduration=2.124990161 podStartE2EDuration="2.403915669s" podCreationTimestamp="2025-10-03 14:15:01 +0000 UTC" firstStartedPulling="2025-10-03 14:15:02.488359754 +0000 UTC m=+2616.486344801" lastFinishedPulling="2025-10-03 14:15:02.767285242 +0000 UTC m=+2616.765270309" observedRunningTime="2025-10-03 14:15:03.39981104 +0000 UTC m=+2617.397796087" watchObservedRunningTime="2025-10-03 14:15:03.403915669 +0000 UTC m=+2617.401900716" Oct 03 14:15:03 crc kubenswrapper[4861]: I1003 14:15:03.733728 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29325015-5cw5b" Oct 03 14:15:03 crc kubenswrapper[4861]: I1003 14:15:03.859776 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/58941f0d-ab00-4963-8608-dbbe6047db92-config-volume\") pod \"58941f0d-ab00-4963-8608-dbbe6047db92\" (UID: \"58941f0d-ab00-4963-8608-dbbe6047db92\") " Oct 03 14:15:03 crc kubenswrapper[4861]: I1003 14:15:03.860069 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/58941f0d-ab00-4963-8608-dbbe6047db92-secret-volume\") pod \"58941f0d-ab00-4963-8608-dbbe6047db92\" (UID: \"58941f0d-ab00-4963-8608-dbbe6047db92\") " Oct 03 14:15:03 crc kubenswrapper[4861]: I1003 14:15:03.860394 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x47jm\" (UniqueName: \"kubernetes.io/projected/58941f0d-ab00-4963-8608-dbbe6047db92-kube-api-access-x47jm\") pod \"58941f0d-ab00-4963-8608-dbbe6047db92\" (UID: \"58941f0d-ab00-4963-8608-dbbe6047db92\") " Oct 03 14:15:03 crc kubenswrapper[4861]: I1003 14:15:03.860497 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/58941f0d-ab00-4963-8608-dbbe6047db92-config-volume" (OuterVolumeSpecName: "config-volume") pod "58941f0d-ab00-4963-8608-dbbe6047db92" (UID: "58941f0d-ab00-4963-8608-dbbe6047db92"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 14:15:03 crc kubenswrapper[4861]: I1003 14:15:03.861028 4861 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/58941f0d-ab00-4963-8608-dbbe6047db92-config-volume\") on node \"crc\" DevicePath \"\"" Oct 03 14:15:03 crc kubenswrapper[4861]: I1003 14:15:03.865543 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/58941f0d-ab00-4963-8608-dbbe6047db92-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "58941f0d-ab00-4963-8608-dbbe6047db92" (UID: "58941f0d-ab00-4963-8608-dbbe6047db92"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 14:15:03 crc kubenswrapper[4861]: I1003 14:15:03.872009 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/58941f0d-ab00-4963-8608-dbbe6047db92-kube-api-access-x47jm" (OuterVolumeSpecName: "kube-api-access-x47jm") pod "58941f0d-ab00-4963-8608-dbbe6047db92" (UID: "58941f0d-ab00-4963-8608-dbbe6047db92"). InnerVolumeSpecName "kube-api-access-x47jm". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 14:15:03 crc kubenswrapper[4861]: I1003 14:15:03.962826 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x47jm\" (UniqueName: \"kubernetes.io/projected/58941f0d-ab00-4963-8608-dbbe6047db92-kube-api-access-x47jm\") on node \"crc\" DevicePath \"\"" Oct 03 14:15:03 crc kubenswrapper[4861]: I1003 14:15:03.962867 4861 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/58941f0d-ab00-4963-8608-dbbe6047db92-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 03 14:15:04 crc kubenswrapper[4861]: I1003 14:15:04.404136 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29325015-5cw5b" Oct 03 14:15:04 crc kubenswrapper[4861]: I1003 14:15:04.404223 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29325015-5cw5b" event={"ID":"58941f0d-ab00-4963-8608-dbbe6047db92","Type":"ContainerDied","Data":"6b6dc1c10903517e9787d219600d2def89facee0907c90bcf4a7524ee6ccabce"} Oct 03 14:15:04 crc kubenswrapper[4861]: I1003 14:15:04.404287 4861 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6b6dc1c10903517e9787d219600d2def89facee0907c90bcf4a7524ee6ccabce" Oct 03 14:15:04 crc kubenswrapper[4861]: I1003 14:15:04.842250 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29324970-q4nf6"] Oct 03 14:15:04 crc kubenswrapper[4861]: I1003 14:15:04.852249 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29324970-q4nf6"] Oct 03 14:15:06 crc kubenswrapper[4861]: I1003 14:15:06.713468 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0c3c7089-d98e-4504-91c2-27851ed21d16" path="/var/lib/kubelet/pods/0c3c7089-d98e-4504-91c2-27851ed21d16/volumes" Oct 03 14:15:30 crc kubenswrapper[4861]: I1003 14:15:30.145443 4861 patch_prober.go:28] interesting pod/machine-config-daemon-t9slw container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 14:15:30 crc kubenswrapper[4861]: I1003 14:15:30.146704 4861 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 14:15:58 crc kubenswrapper[4861]: I1003 14:15:58.828013 4861 scope.go:117] "RemoveContainer" containerID="b9ea19e2e924a668a8c9fd3450ddc14d94b6a5f9c680b84da2a77ad9e4691963" Oct 03 14:16:00 crc kubenswrapper[4861]: I1003 14:16:00.144694 4861 patch_prober.go:28] interesting pod/machine-config-daemon-t9slw container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 14:16:00 crc kubenswrapper[4861]: I1003 14:16:00.144754 4861 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 14:16:30 crc kubenswrapper[4861]: I1003 14:16:30.145588 4861 patch_prober.go:28] interesting pod/machine-config-daemon-t9slw container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 14:16:30 crc kubenswrapper[4861]: I1003 14:16:30.146103 4861 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" 
podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 14:16:30 crc kubenswrapper[4861]: I1003 14:16:30.146147 4861 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" Oct 03 14:16:30 crc kubenswrapper[4861]: I1003 14:16:30.146672 4861 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"05fad3e765859f2b4ff1b56618745f3a7977daa63f6f944dae7adb4c2a767290"} pod="openshift-machine-config-operator/machine-config-daemon-t9slw" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 03 14:16:30 crc kubenswrapper[4861]: I1003 14:16:30.146727 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" containerName="machine-config-daemon" containerID="cri-o://05fad3e765859f2b4ff1b56618745f3a7977daa63f6f944dae7adb4c2a767290" gracePeriod=600 Oct 03 14:16:31 crc kubenswrapper[4861]: I1003 14:16:31.191123 4861 generic.go:334] "Generic (PLEG): container finished" podID="d8335d3f-417e-4114-b306-a3d8f6c31348" containerID="05fad3e765859f2b4ff1b56618745f3a7977daa63f6f944dae7adb4c2a767290" exitCode=0 Oct 03 14:16:31 crc kubenswrapper[4861]: I1003 14:16:31.191276 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" event={"ID":"d8335d3f-417e-4114-b306-a3d8f6c31348","Type":"ContainerDied","Data":"05fad3e765859f2b4ff1b56618745f3a7977daa63f6f944dae7adb4c2a767290"} Oct 03 14:16:31 crc kubenswrapper[4861]: I1003 14:16:31.192345 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" event={"ID":"d8335d3f-417e-4114-b306-a3d8f6c31348","Type":"ContainerStarted","Data":"7df9abfdf812def04452ad11556b9675c4769383aff2ceeff90ba8cf9ea0a319"} Oct 03 14:16:31 crc kubenswrapper[4861]: I1003 14:16:31.192422 4861 scope.go:117] "RemoveContainer" containerID="1cdb234382b9308b9a1f3635af42d3c4e786f277d88eba9e4b15d46ab010519c" Oct 03 14:17:45 crc kubenswrapper[4861]: I1003 14:17:45.358853 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-z8jlq"] Oct 03 14:17:45 crc kubenswrapper[4861]: E1003 14:17:45.363903 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="58941f0d-ab00-4963-8608-dbbe6047db92" containerName="collect-profiles" Oct 03 14:17:45 crc kubenswrapper[4861]: I1003 14:17:45.363959 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="58941f0d-ab00-4963-8608-dbbe6047db92" containerName="collect-profiles" Oct 03 14:17:45 crc kubenswrapper[4861]: I1003 14:17:45.364337 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="58941f0d-ab00-4963-8608-dbbe6047db92" containerName="collect-profiles" Oct 03 14:17:45 crc kubenswrapper[4861]: I1003 14:17:45.365775 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-z8jlq" Oct 03 14:17:45 crc kubenswrapper[4861]: I1003 14:17:45.373300 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-z8jlq"] Oct 03 14:17:45 crc kubenswrapper[4861]: I1003 14:17:45.457482 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nrrzq\" (UniqueName: \"kubernetes.io/projected/c0980709-292e-471e-b8b5-f3e4a8e7c119-kube-api-access-nrrzq\") pod \"redhat-operators-z8jlq\" (UID: \"c0980709-292e-471e-b8b5-f3e4a8e7c119\") " pod="openshift-marketplace/redhat-operators-z8jlq" Oct 03 14:17:45 crc kubenswrapper[4861]: I1003 14:17:45.457837 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c0980709-292e-471e-b8b5-f3e4a8e7c119-catalog-content\") pod \"redhat-operators-z8jlq\" (UID: \"c0980709-292e-471e-b8b5-f3e4a8e7c119\") " pod="openshift-marketplace/redhat-operators-z8jlq" Oct 03 14:17:45 crc kubenswrapper[4861]: I1003 14:17:45.457875 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c0980709-292e-471e-b8b5-f3e4a8e7c119-utilities\") pod \"redhat-operators-z8jlq\" (UID: \"c0980709-292e-471e-b8b5-f3e4a8e7c119\") " pod="openshift-marketplace/redhat-operators-z8jlq" Oct 03 14:17:45 crc kubenswrapper[4861]: I1003 14:17:45.559967 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nrrzq\" (UniqueName: \"kubernetes.io/projected/c0980709-292e-471e-b8b5-f3e4a8e7c119-kube-api-access-nrrzq\") pod \"redhat-operators-z8jlq\" (UID: \"c0980709-292e-471e-b8b5-f3e4a8e7c119\") " pod="openshift-marketplace/redhat-operators-z8jlq" Oct 03 14:17:45 crc kubenswrapper[4861]: I1003 14:17:45.560413 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c0980709-292e-471e-b8b5-f3e4a8e7c119-catalog-content\") pod \"redhat-operators-z8jlq\" (UID: \"c0980709-292e-471e-b8b5-f3e4a8e7c119\") " pod="openshift-marketplace/redhat-operators-z8jlq" Oct 03 14:17:45 crc kubenswrapper[4861]: I1003 14:17:45.560582 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c0980709-292e-471e-b8b5-f3e4a8e7c119-utilities\") pod \"redhat-operators-z8jlq\" (UID: \"c0980709-292e-471e-b8b5-f3e4a8e7c119\") " pod="openshift-marketplace/redhat-operators-z8jlq" Oct 03 14:17:45 crc kubenswrapper[4861]: I1003 14:17:45.560893 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c0980709-292e-471e-b8b5-f3e4a8e7c119-catalog-content\") pod \"redhat-operators-z8jlq\" (UID: \"c0980709-292e-471e-b8b5-f3e4a8e7c119\") " pod="openshift-marketplace/redhat-operators-z8jlq" Oct 03 14:17:45 crc kubenswrapper[4861]: I1003 14:17:45.561295 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c0980709-292e-471e-b8b5-f3e4a8e7c119-utilities\") pod \"redhat-operators-z8jlq\" (UID: \"c0980709-292e-471e-b8b5-f3e4a8e7c119\") " pod="openshift-marketplace/redhat-operators-z8jlq" Oct 03 14:17:45 crc kubenswrapper[4861]: I1003 14:17:45.592641 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-nrrzq\" (UniqueName: \"kubernetes.io/projected/c0980709-292e-471e-b8b5-f3e4a8e7c119-kube-api-access-nrrzq\") pod \"redhat-operators-z8jlq\" (UID: \"c0980709-292e-471e-b8b5-f3e4a8e7c119\") " pod="openshift-marketplace/redhat-operators-z8jlq" Oct 03 14:17:45 crc kubenswrapper[4861]: I1003 14:17:45.695873 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-z8jlq" Oct 03 14:17:46 crc kubenswrapper[4861]: I1003 14:17:46.008519 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-z8jlq"] Oct 03 14:17:46 crc kubenswrapper[4861]: W1003 14:17:46.018304 4861 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc0980709_292e_471e_b8b5_f3e4a8e7c119.slice/crio-febef003b1775f6ae85dda1260f2596ac228c316a471514e6b0fefa4322c0b52 WatchSource:0}: Error finding container febef003b1775f6ae85dda1260f2596ac228c316a471514e6b0fefa4322c0b52: Status 404 returned error can't find the container with id febef003b1775f6ae85dda1260f2596ac228c316a471514e6b0fefa4322c0b52 Oct 03 14:17:46 crc kubenswrapper[4861]: I1003 14:17:46.945810 4861 generic.go:334] "Generic (PLEG): container finished" podID="c0980709-292e-471e-b8b5-f3e4a8e7c119" containerID="7ec3197b0491f6a87e795d6bc7226e291af78f157615f4999e08882713c303f0" exitCode=0 Oct 03 14:17:46 crc kubenswrapper[4861]: I1003 14:17:46.945905 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-z8jlq" event={"ID":"c0980709-292e-471e-b8b5-f3e4a8e7c119","Type":"ContainerDied","Data":"7ec3197b0491f6a87e795d6bc7226e291af78f157615f4999e08882713c303f0"} Oct 03 14:17:46 crc kubenswrapper[4861]: I1003 14:17:46.947094 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-z8jlq" event={"ID":"c0980709-292e-471e-b8b5-f3e4a8e7c119","Type":"ContainerStarted","Data":"febef003b1775f6ae85dda1260f2596ac228c316a471514e6b0fefa4322c0b52"} Oct 03 14:17:48 crc kubenswrapper[4861]: I1003 14:17:48.965340 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-z8jlq" event={"ID":"c0980709-292e-471e-b8b5-f3e4a8e7c119","Type":"ContainerStarted","Data":"28bf6787f8d1e1279ce552588af286cafcbbfa77c5494d833075750097fd40fc"} Oct 03 14:18:03 crc kubenswrapper[4861]: I1003 14:18:03.092823 4861 generic.go:334] "Generic (PLEG): container finished" podID="c0980709-292e-471e-b8b5-f3e4a8e7c119" containerID="28bf6787f8d1e1279ce552588af286cafcbbfa77c5494d833075750097fd40fc" exitCode=0 Oct 03 14:18:03 crc kubenswrapper[4861]: I1003 14:18:03.092930 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-z8jlq" event={"ID":"c0980709-292e-471e-b8b5-f3e4a8e7c119","Type":"ContainerDied","Data":"28bf6787f8d1e1279ce552588af286cafcbbfa77c5494d833075750097fd40fc"} Oct 03 14:18:05 crc kubenswrapper[4861]: I1003 14:18:05.114001 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-z8jlq" event={"ID":"c0980709-292e-471e-b8b5-f3e4a8e7c119","Type":"ContainerStarted","Data":"c68e35a2cb788f76bdf292c6e0f2cd13834aecc5f0d709f99f5a62ba95559214"} Oct 03 14:18:05 crc kubenswrapper[4861]: I1003 14:18:05.696468 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-z8jlq" Oct 03 14:18:05 crc kubenswrapper[4861]: I1003 14:18:05.696880 4861 kubelet.go:2542] "SyncLoop 
(probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-z8jlq" Oct 03 14:18:06 crc kubenswrapper[4861]: I1003 14:18:06.746801 4861 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-z8jlq" podUID="c0980709-292e-471e-b8b5-f3e4a8e7c119" containerName="registry-server" probeResult="failure" output=< Oct 03 14:18:06 crc kubenswrapper[4861]: timeout: failed to connect service ":50051" within 1s Oct 03 14:18:06 crc kubenswrapper[4861]: > Oct 03 14:18:15 crc kubenswrapper[4861]: I1003 14:18:15.763334 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-z8jlq" Oct 03 14:18:15 crc kubenswrapper[4861]: I1003 14:18:15.784076 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-z8jlq" podStartSLOduration=13.667679719 podStartE2EDuration="30.784060973s" podCreationTimestamp="2025-10-03 14:17:45 +0000 UTC" firstStartedPulling="2025-10-03 14:17:46.947929875 +0000 UTC m=+2780.945914922" lastFinishedPulling="2025-10-03 14:18:04.064311129 +0000 UTC m=+2798.062296176" observedRunningTime="2025-10-03 14:18:05.138262553 +0000 UTC m=+2799.136247620" watchObservedRunningTime="2025-10-03 14:18:15.784060973 +0000 UTC m=+2809.782046020" Oct 03 14:18:15 crc kubenswrapper[4861]: I1003 14:18:15.840397 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-z8jlq" Oct 03 14:18:16 crc kubenswrapper[4861]: I1003 14:18:16.555385 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-z8jlq"] Oct 03 14:18:17 crc kubenswrapper[4861]: I1003 14:18:17.234506 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-z8jlq" podUID="c0980709-292e-471e-b8b5-f3e4a8e7c119" containerName="registry-server" containerID="cri-o://c68e35a2cb788f76bdf292c6e0f2cd13834aecc5f0d709f99f5a62ba95559214" gracePeriod=2 Oct 03 14:18:19 crc kubenswrapper[4861]: I1003 14:18:19.793449 4861 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/ceilometer-0" podUID="14629b29-a838-444c-9acf-42c14b7dbe5a" containerName="ceilometer-central-agent" probeResult="failure" output="command timed out" Oct 03 14:18:24 crc kubenswrapper[4861]: I1003 14:18:24.796827 4861 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/ceilometer-0" podUID="14629b29-a838-444c-9acf-42c14b7dbe5a" containerName="ceilometer-central-agent" probeResult="failure" output="command timed out" Oct 03 14:18:25 crc kubenswrapper[4861]: E1003 14:18:25.697710 4861 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of c68e35a2cb788f76bdf292c6e0f2cd13834aecc5f0d709f99f5a62ba95559214 is running failed: container process not found" containerID="c68e35a2cb788f76bdf292c6e0f2cd13834aecc5f0d709f99f5a62ba95559214" cmd=["grpc_health_probe","-addr=:50051"] Oct 03 14:18:25 crc kubenswrapper[4861]: E1003 14:18:25.698687 4861 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of c68e35a2cb788f76bdf292c6e0f2cd13834aecc5f0d709f99f5a62ba95559214 is running failed: container process not found" containerID="c68e35a2cb788f76bdf292c6e0f2cd13834aecc5f0d709f99f5a62ba95559214" cmd=["grpc_health_probe","-addr=:50051"] Oct 03 14:18:25 crc 
kubenswrapper[4861]: E1003 14:18:25.699145 4861 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of c68e35a2cb788f76bdf292c6e0f2cd13834aecc5f0d709f99f5a62ba95559214 is running failed: container process not found" containerID="c68e35a2cb788f76bdf292c6e0f2cd13834aecc5f0d709f99f5a62ba95559214" cmd=["grpc_health_probe","-addr=:50051"] Oct 03 14:18:25 crc kubenswrapper[4861]: E1003 14:18:25.699185 4861 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of c68e35a2cb788f76bdf292c6e0f2cd13834aecc5f0d709f99f5a62ba95559214 is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/redhat-operators-z8jlq" podUID="c0980709-292e-471e-b8b5-f3e4a8e7c119" containerName="registry-server" Oct 03 14:18:25 crc kubenswrapper[4861]: I1003 14:18:25.866601 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-z8jlq_c0980709-292e-471e-b8b5-f3e4a8e7c119/registry-server/0.log" Oct 03 14:18:25 crc kubenswrapper[4861]: I1003 14:18:25.873826 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-z8jlq" Oct 03 14:18:25 crc kubenswrapper[4861]: I1003 14:18:25.971594 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c0980709-292e-471e-b8b5-f3e4a8e7c119-catalog-content\") pod \"c0980709-292e-471e-b8b5-f3e4a8e7c119\" (UID: \"c0980709-292e-471e-b8b5-f3e4a8e7c119\") " Oct 03 14:18:25 crc kubenswrapper[4861]: I1003 14:18:25.972059 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c0980709-292e-471e-b8b5-f3e4a8e7c119-utilities\") pod \"c0980709-292e-471e-b8b5-f3e4a8e7c119\" (UID: \"c0980709-292e-471e-b8b5-f3e4a8e7c119\") " Oct 03 14:18:25 crc kubenswrapper[4861]: I1003 14:18:25.972170 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nrrzq\" (UniqueName: \"kubernetes.io/projected/c0980709-292e-471e-b8b5-f3e4a8e7c119-kube-api-access-nrrzq\") pod \"c0980709-292e-471e-b8b5-f3e4a8e7c119\" (UID: \"c0980709-292e-471e-b8b5-f3e4a8e7c119\") " Oct 03 14:18:25 crc kubenswrapper[4861]: I1003 14:18:25.977847 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c0980709-292e-471e-b8b5-f3e4a8e7c119-utilities" (OuterVolumeSpecName: "utilities") pod "c0980709-292e-471e-b8b5-f3e4a8e7c119" (UID: "c0980709-292e-471e-b8b5-f3e4a8e7c119"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 14:18:25 crc kubenswrapper[4861]: I1003 14:18:25.981147 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c0980709-292e-471e-b8b5-f3e4a8e7c119-kube-api-access-nrrzq" (OuterVolumeSpecName: "kube-api-access-nrrzq") pod "c0980709-292e-471e-b8b5-f3e4a8e7c119" (UID: "c0980709-292e-471e-b8b5-f3e4a8e7c119"). InnerVolumeSpecName "kube-api-access-nrrzq". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 14:18:25 crc kubenswrapper[4861]: I1003 14:18:25.985141 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-z8jlq_c0980709-292e-471e-b8b5-f3e4a8e7c119/registry-server/0.log" Oct 03 14:18:25 crc kubenswrapper[4861]: I1003 14:18:25.986246 4861 generic.go:334] "Generic (PLEG): container finished" podID="c0980709-292e-471e-b8b5-f3e4a8e7c119" containerID="c68e35a2cb788f76bdf292c6e0f2cd13834aecc5f0d709f99f5a62ba95559214" exitCode=137 Oct 03 14:18:25 crc kubenswrapper[4861]: I1003 14:18:25.986279 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-z8jlq" event={"ID":"c0980709-292e-471e-b8b5-f3e4a8e7c119","Type":"ContainerDied","Data":"c68e35a2cb788f76bdf292c6e0f2cd13834aecc5f0d709f99f5a62ba95559214"} Oct 03 14:18:25 crc kubenswrapper[4861]: I1003 14:18:25.986309 4861 scope.go:117] "RemoveContainer" containerID="c68e35a2cb788f76bdf292c6e0f2cd13834aecc5f0d709f99f5a62ba95559214" Oct 03 14:18:26 crc kubenswrapper[4861]: I1003 14:18:26.057978 4861 scope.go:117] "RemoveContainer" containerID="28bf6787f8d1e1279ce552588af286cafcbbfa77c5494d833075750097fd40fc" Oct 03 14:18:26 crc kubenswrapper[4861]: I1003 14:18:26.065517 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c0980709-292e-471e-b8b5-f3e4a8e7c119-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c0980709-292e-471e-b8b5-f3e4a8e7c119" (UID: "c0980709-292e-471e-b8b5-f3e4a8e7c119"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 14:18:26 crc kubenswrapper[4861]: I1003 14:18:26.075251 4861 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c0980709-292e-471e-b8b5-f3e4a8e7c119-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 14:18:26 crc kubenswrapper[4861]: I1003 14:18:26.075455 4861 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c0980709-292e-471e-b8b5-f3e4a8e7c119-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 14:18:26 crc kubenswrapper[4861]: I1003 14:18:26.075518 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nrrzq\" (UniqueName: \"kubernetes.io/projected/c0980709-292e-471e-b8b5-f3e4a8e7c119-kube-api-access-nrrzq\") on node \"crc\" DevicePath \"\"" Oct 03 14:18:26 crc kubenswrapper[4861]: I1003 14:18:26.079309 4861 scope.go:117] "RemoveContainer" containerID="7ec3197b0491f6a87e795d6bc7226e291af78f157615f4999e08882713c303f0" Oct 03 14:18:26 crc kubenswrapper[4861]: I1003 14:18:26.994042 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-z8jlq" event={"ID":"c0980709-292e-471e-b8b5-f3e4a8e7c119","Type":"ContainerDied","Data":"febef003b1775f6ae85dda1260f2596ac228c316a471514e6b0fefa4322c0b52"} Oct 03 14:18:26 crc kubenswrapper[4861]: I1003 14:18:26.994104 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-z8jlq" Oct 03 14:18:27 crc kubenswrapper[4861]: I1003 14:18:27.016011 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-z8jlq"] Oct 03 14:18:27 crc kubenswrapper[4861]: I1003 14:18:27.022645 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-z8jlq"] Oct 03 14:18:28 crc kubenswrapper[4861]: I1003 14:18:28.691773 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c0980709-292e-471e-b8b5-f3e4a8e7c119" path="/var/lib/kubelet/pods/c0980709-292e-471e-b8b5-f3e4a8e7c119/volumes" Oct 03 14:18:30 crc kubenswrapper[4861]: I1003 14:18:30.144960 4861 patch_prober.go:28] interesting pod/machine-config-daemon-t9slw container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 14:18:30 crc kubenswrapper[4861]: I1003 14:18:30.145054 4861 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 14:18:36 crc kubenswrapper[4861]: E1003 14:18:36.901451 4861 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc0980709_292e_471e_b8b5_f3e4a8e7c119.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc0980709_292e_471e_b8b5_f3e4a8e7c119.slice/crio-febef003b1775f6ae85dda1260f2596ac228c316a471514e6b0fefa4322c0b52\": RecentStats: unable to find data in memory cache]" Oct 03 14:18:47 crc kubenswrapper[4861]: E1003 14:18:47.153065 4861 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc0980709_292e_471e_b8b5_f3e4a8e7c119.slice/crio-febef003b1775f6ae85dda1260f2596ac228c316a471514e6b0fefa4322c0b52\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc0980709_292e_471e_b8b5_f3e4a8e7c119.slice\": RecentStats: unable to find data in memory cache]" Oct 03 14:18:57 crc kubenswrapper[4861]: I1003 14:18:57.019550 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-dl8gr"] Oct 03 14:18:57 crc kubenswrapper[4861]: E1003 14:18:57.020485 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c0980709-292e-471e-b8b5-f3e4a8e7c119" containerName="registry-server" Oct 03 14:18:57 crc kubenswrapper[4861]: I1003 14:18:57.020500 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="c0980709-292e-471e-b8b5-f3e4a8e7c119" containerName="registry-server" Oct 03 14:18:57 crc kubenswrapper[4861]: E1003 14:18:57.020511 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c0980709-292e-471e-b8b5-f3e4a8e7c119" containerName="extract-content" Oct 03 14:18:57 crc kubenswrapper[4861]: I1003 14:18:57.020518 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="c0980709-292e-471e-b8b5-f3e4a8e7c119" containerName="extract-content" Oct 03 14:18:57 crc 
kubenswrapper[4861]: E1003 14:18:57.020544 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c0980709-292e-471e-b8b5-f3e4a8e7c119" containerName="extract-utilities" Oct 03 14:18:57 crc kubenswrapper[4861]: I1003 14:18:57.020552 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="c0980709-292e-471e-b8b5-f3e4a8e7c119" containerName="extract-utilities" Oct 03 14:18:57 crc kubenswrapper[4861]: I1003 14:18:57.020784 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="c0980709-292e-471e-b8b5-f3e4a8e7c119" containerName="registry-server" Oct 03 14:18:57 crc kubenswrapper[4861]: I1003 14:18:57.022824 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-dl8gr" Oct 03 14:18:57 crc kubenswrapper[4861]: I1003 14:18:57.034001 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-dl8gr"] Oct 03 14:18:57 crc kubenswrapper[4861]: I1003 14:18:57.174990 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7h4p9\" (UniqueName: \"kubernetes.io/projected/b4e809cf-20ec-4e0f-986d-b1e6c0fae5c1-kube-api-access-7h4p9\") pod \"redhat-marketplace-dl8gr\" (UID: \"b4e809cf-20ec-4e0f-986d-b1e6c0fae5c1\") " pod="openshift-marketplace/redhat-marketplace-dl8gr" Oct 03 14:18:57 crc kubenswrapper[4861]: I1003 14:18:57.175252 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b4e809cf-20ec-4e0f-986d-b1e6c0fae5c1-utilities\") pod \"redhat-marketplace-dl8gr\" (UID: \"b4e809cf-20ec-4e0f-986d-b1e6c0fae5c1\") " pod="openshift-marketplace/redhat-marketplace-dl8gr" Oct 03 14:18:57 crc kubenswrapper[4861]: I1003 14:18:57.175386 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b4e809cf-20ec-4e0f-986d-b1e6c0fae5c1-catalog-content\") pod \"redhat-marketplace-dl8gr\" (UID: \"b4e809cf-20ec-4e0f-986d-b1e6c0fae5c1\") " pod="openshift-marketplace/redhat-marketplace-dl8gr" Oct 03 14:18:57 crc kubenswrapper[4861]: I1003 14:18:57.277569 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7h4p9\" (UniqueName: \"kubernetes.io/projected/b4e809cf-20ec-4e0f-986d-b1e6c0fae5c1-kube-api-access-7h4p9\") pod \"redhat-marketplace-dl8gr\" (UID: \"b4e809cf-20ec-4e0f-986d-b1e6c0fae5c1\") " pod="openshift-marketplace/redhat-marketplace-dl8gr" Oct 03 14:18:57 crc kubenswrapper[4861]: I1003 14:18:57.277923 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b4e809cf-20ec-4e0f-986d-b1e6c0fae5c1-utilities\") pod \"redhat-marketplace-dl8gr\" (UID: \"b4e809cf-20ec-4e0f-986d-b1e6c0fae5c1\") " pod="openshift-marketplace/redhat-marketplace-dl8gr" Oct 03 14:18:57 crc kubenswrapper[4861]: I1003 14:18:57.278036 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b4e809cf-20ec-4e0f-986d-b1e6c0fae5c1-catalog-content\") pod \"redhat-marketplace-dl8gr\" (UID: \"b4e809cf-20ec-4e0f-986d-b1e6c0fae5c1\") " pod="openshift-marketplace/redhat-marketplace-dl8gr" Oct 03 14:18:57 crc kubenswrapper[4861]: I1003 14:18:57.278586 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/b4e809cf-20ec-4e0f-986d-b1e6c0fae5c1-catalog-content\") pod \"redhat-marketplace-dl8gr\" (UID: \"b4e809cf-20ec-4e0f-986d-b1e6c0fae5c1\") " pod="openshift-marketplace/redhat-marketplace-dl8gr" Oct 03 14:18:57 crc kubenswrapper[4861]: I1003 14:18:57.278679 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b4e809cf-20ec-4e0f-986d-b1e6c0fae5c1-utilities\") pod \"redhat-marketplace-dl8gr\" (UID: \"b4e809cf-20ec-4e0f-986d-b1e6c0fae5c1\") " pod="openshift-marketplace/redhat-marketplace-dl8gr" Oct 03 14:18:57 crc kubenswrapper[4861]: I1003 14:18:57.299174 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7h4p9\" (UniqueName: \"kubernetes.io/projected/b4e809cf-20ec-4e0f-986d-b1e6c0fae5c1-kube-api-access-7h4p9\") pod \"redhat-marketplace-dl8gr\" (UID: \"b4e809cf-20ec-4e0f-986d-b1e6c0fae5c1\") " pod="openshift-marketplace/redhat-marketplace-dl8gr" Oct 03 14:18:57 crc kubenswrapper[4861]: I1003 14:18:57.343540 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-dl8gr" Oct 03 14:18:57 crc kubenswrapper[4861]: E1003 14:18:57.440575 4861 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc0980709_292e_471e_b8b5_f3e4a8e7c119.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc0980709_292e_471e_b8b5_f3e4a8e7c119.slice/crio-febef003b1775f6ae85dda1260f2596ac228c316a471514e6b0fefa4322c0b52\": RecentStats: unable to find data in memory cache]" Oct 03 14:18:57 crc kubenswrapper[4861]: I1003 14:18:57.843624 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-dl8gr"] Oct 03 14:18:58 crc kubenswrapper[4861]: I1003 14:18:58.275621 4861 generic.go:334] "Generic (PLEG): container finished" podID="b4e809cf-20ec-4e0f-986d-b1e6c0fae5c1" containerID="3dfe6f7fc96e1510ba32628d777f6002f0cb1a84b4ced2ba490468be43e37cb3" exitCode=0 Oct 03 14:18:58 crc kubenswrapper[4861]: I1003 14:18:58.275967 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dl8gr" event={"ID":"b4e809cf-20ec-4e0f-986d-b1e6c0fae5c1","Type":"ContainerDied","Data":"3dfe6f7fc96e1510ba32628d777f6002f0cb1a84b4ced2ba490468be43e37cb3"} Oct 03 14:18:58 crc kubenswrapper[4861]: I1003 14:18:58.276016 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dl8gr" event={"ID":"b4e809cf-20ec-4e0f-986d-b1e6c0fae5c1","Type":"ContainerStarted","Data":"c197333071fb14f3d4a498cd2b41bf3315dd204d651a86d72575c35841df5f6d"} Oct 03 14:19:00 crc kubenswrapper[4861]: I1003 14:19:00.146620 4861 patch_prober.go:28] interesting pod/machine-config-daemon-t9slw container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 14:19:00 crc kubenswrapper[4861]: I1003 14:19:00.147057 4861 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: 
connection refused" Oct 03 14:19:00 crc kubenswrapper[4861]: I1003 14:19:00.302381 4861 generic.go:334] "Generic (PLEG): container finished" podID="b4e809cf-20ec-4e0f-986d-b1e6c0fae5c1" containerID="f2a339d2e3df85da89650c170c13bf0190893755fdfbb21b4408d8b3259c255a" exitCode=0 Oct 03 14:19:00 crc kubenswrapper[4861]: I1003 14:19:00.302439 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dl8gr" event={"ID":"b4e809cf-20ec-4e0f-986d-b1e6c0fae5c1","Type":"ContainerDied","Data":"f2a339d2e3df85da89650c170c13bf0190893755fdfbb21b4408d8b3259c255a"} Oct 03 14:19:01 crc kubenswrapper[4861]: I1003 14:19:01.316170 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dl8gr" event={"ID":"b4e809cf-20ec-4e0f-986d-b1e6c0fae5c1","Type":"ContainerStarted","Data":"cff533e8617967bf8f6b2f86b9dfdb90ac2e1e8c4e0273cbe16e42828880f02e"} Oct 03 14:19:01 crc kubenswrapper[4861]: I1003 14:19:01.353331 4861 generic.go:334] "Generic (PLEG): container finished" podID="bc64fc8f-fe84-485b-8d52-a4e26a00435a" containerID="bf8da3f186490ef59ef68bc1a0ab8dbb596955434513485978fdf799fd829679" exitCode=0 Oct 03 14:19:01 crc kubenswrapper[4861]: I1003 14:19:01.353376 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-mlf7m" event={"ID":"bc64fc8f-fe84-485b-8d52-a4e26a00435a","Type":"ContainerDied","Data":"bf8da3f186490ef59ef68bc1a0ab8dbb596955434513485978fdf799fd829679"} Oct 03 14:19:01 crc kubenswrapper[4861]: I1003 14:19:01.381178 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-dl8gr" podStartSLOduration=2.718288382 podStartE2EDuration="5.38115848s" podCreationTimestamp="2025-10-03 14:18:56 +0000 UTC" firstStartedPulling="2025-10-03 14:18:58.277737977 +0000 UTC m=+2852.275723024" lastFinishedPulling="2025-10-03 14:19:00.940608075 +0000 UTC m=+2854.938593122" observedRunningTime="2025-10-03 14:19:01.353141273 +0000 UTC m=+2855.351126340" watchObservedRunningTime="2025-10-03 14:19:01.38115848 +0000 UTC m=+2855.379143527" Oct 03 14:19:02 crc kubenswrapper[4861]: I1003 14:19:02.809375 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-mlf7m" Oct 03 14:19:02 crc kubenswrapper[4861]: I1003 14:19:02.899850 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/bc64fc8f-fe84-485b-8d52-a4e26a00435a-nova-migration-ssh-key-1\") pod \"bc64fc8f-fe84-485b-8d52-a4e26a00435a\" (UID: \"bc64fc8f-fe84-485b-8d52-a4e26a00435a\") " Oct 03 14:19:02 crc kubenswrapper[4861]: I1003 14:19:02.899922 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/bc64fc8f-fe84-485b-8d52-a4e26a00435a-ssh-key\") pod \"bc64fc8f-fe84-485b-8d52-a4e26a00435a\" (UID: \"bc64fc8f-fe84-485b-8d52-a4e26a00435a\") " Oct 03 14:19:02 crc kubenswrapper[4861]: I1003 14:19:02.899979 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/bc64fc8f-fe84-485b-8d52-a4e26a00435a-nova-cell1-compute-config-0\") pod \"bc64fc8f-fe84-485b-8d52-a4e26a00435a\" (UID: \"bc64fc8f-fe84-485b-8d52-a4e26a00435a\") " Oct 03 14:19:02 crc kubenswrapper[4861]: I1003 14:19:02.900037 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/bc64fc8f-fe84-485b-8d52-a4e26a00435a-nova-migration-ssh-key-0\") pod \"bc64fc8f-fe84-485b-8d52-a4e26a00435a\" (UID: \"bc64fc8f-fe84-485b-8d52-a4e26a00435a\") " Oct 03 14:19:02 crc kubenswrapper[4861]: I1003 14:19:02.900119 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5njdp\" (UniqueName: \"kubernetes.io/projected/bc64fc8f-fe84-485b-8d52-a4e26a00435a-kube-api-access-5njdp\") pod \"bc64fc8f-fe84-485b-8d52-a4e26a00435a\" (UID: \"bc64fc8f-fe84-485b-8d52-a4e26a00435a\") " Oct 03 14:19:02 crc kubenswrapper[4861]: I1003 14:19:02.900155 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/bc64fc8f-fe84-485b-8d52-a4e26a00435a-inventory\") pod \"bc64fc8f-fe84-485b-8d52-a4e26a00435a\" (UID: \"bc64fc8f-fe84-485b-8d52-a4e26a00435a\") " Oct 03 14:19:02 crc kubenswrapper[4861]: I1003 14:19:02.900326 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/bc64fc8f-fe84-485b-8d52-a4e26a00435a-nova-cell1-compute-config-1\") pod \"bc64fc8f-fe84-485b-8d52-a4e26a00435a\" (UID: \"bc64fc8f-fe84-485b-8d52-a4e26a00435a\") " Oct 03 14:19:02 crc kubenswrapper[4861]: I1003 14:19:02.900400 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/bc64fc8f-fe84-485b-8d52-a4e26a00435a-nova-extra-config-0\") pod \"bc64fc8f-fe84-485b-8d52-a4e26a00435a\" (UID: \"bc64fc8f-fe84-485b-8d52-a4e26a00435a\") " Oct 03 14:19:02 crc kubenswrapper[4861]: I1003 14:19:02.900427 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bc64fc8f-fe84-485b-8d52-a4e26a00435a-nova-combined-ca-bundle\") pod \"bc64fc8f-fe84-485b-8d52-a4e26a00435a\" (UID: \"bc64fc8f-fe84-485b-8d52-a4e26a00435a\") " Oct 03 14:19:02 crc kubenswrapper[4861]: I1003 14:19:02.907044 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/projected/bc64fc8f-fe84-485b-8d52-a4e26a00435a-kube-api-access-5njdp" (OuterVolumeSpecName: "kube-api-access-5njdp") pod "bc64fc8f-fe84-485b-8d52-a4e26a00435a" (UID: "bc64fc8f-fe84-485b-8d52-a4e26a00435a"). InnerVolumeSpecName "kube-api-access-5njdp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 14:19:02 crc kubenswrapper[4861]: I1003 14:19:02.914482 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc64fc8f-fe84-485b-8d52-a4e26a00435a-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "bc64fc8f-fe84-485b-8d52-a4e26a00435a" (UID: "bc64fc8f-fe84-485b-8d52-a4e26a00435a"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 14:19:02 crc kubenswrapper[4861]: I1003 14:19:02.950706 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc64fc8f-fe84-485b-8d52-a4e26a00435a-inventory" (OuterVolumeSpecName: "inventory") pod "bc64fc8f-fe84-485b-8d52-a4e26a00435a" (UID: "bc64fc8f-fe84-485b-8d52-a4e26a00435a"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 14:19:02 crc kubenswrapper[4861]: I1003 14:19:02.958458 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc64fc8f-fe84-485b-8d52-a4e26a00435a-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "bc64fc8f-fe84-485b-8d52-a4e26a00435a" (UID: "bc64fc8f-fe84-485b-8d52-a4e26a00435a"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 14:19:02 crc kubenswrapper[4861]: I1003 14:19:02.976554 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc64fc8f-fe84-485b-8d52-a4e26a00435a-nova-cell1-compute-config-1" (OuterVolumeSpecName: "nova-cell1-compute-config-1") pod "bc64fc8f-fe84-485b-8d52-a4e26a00435a" (UID: "bc64fc8f-fe84-485b-8d52-a4e26a00435a"). InnerVolumeSpecName "nova-cell1-compute-config-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 14:19:02 crc kubenswrapper[4861]: I1003 14:19:02.980420 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc64fc8f-fe84-485b-8d52-a4e26a00435a-nova-migration-ssh-key-1" (OuterVolumeSpecName: "nova-migration-ssh-key-1") pod "bc64fc8f-fe84-485b-8d52-a4e26a00435a" (UID: "bc64fc8f-fe84-485b-8d52-a4e26a00435a"). InnerVolumeSpecName "nova-migration-ssh-key-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 14:19:02 crc kubenswrapper[4861]: I1003 14:19:02.984589 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc64fc8f-fe84-485b-8d52-a4e26a00435a-nova-cell1-compute-config-0" (OuterVolumeSpecName: "nova-cell1-compute-config-0") pod "bc64fc8f-fe84-485b-8d52-a4e26a00435a" (UID: "bc64fc8f-fe84-485b-8d52-a4e26a00435a"). InnerVolumeSpecName "nova-cell1-compute-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 14:19:02 crc kubenswrapper[4861]: I1003 14:19:02.993533 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc64fc8f-fe84-485b-8d52-a4e26a00435a-nova-migration-ssh-key-0" (OuterVolumeSpecName: "nova-migration-ssh-key-0") pod "bc64fc8f-fe84-485b-8d52-a4e26a00435a" (UID: "bc64fc8f-fe84-485b-8d52-a4e26a00435a"). InnerVolumeSpecName "nova-migration-ssh-key-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 14:19:02 crc kubenswrapper[4861]: I1003 14:19:02.997066 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bc64fc8f-fe84-485b-8d52-a4e26a00435a-nova-extra-config-0" (OuterVolumeSpecName: "nova-extra-config-0") pod "bc64fc8f-fe84-485b-8d52-a4e26a00435a" (UID: "bc64fc8f-fe84-485b-8d52-a4e26a00435a"). InnerVolumeSpecName "nova-extra-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 14:19:03 crc kubenswrapper[4861]: I1003 14:19:03.002959 4861 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/bc64fc8f-fe84-485b-8d52-a4e26a00435a-nova-cell1-compute-config-1\") on node \"crc\" DevicePath \"\"" Oct 03 14:19:03 crc kubenswrapper[4861]: I1003 14:19:03.002985 4861 reconciler_common.go:293] "Volume detached for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/bc64fc8f-fe84-485b-8d52-a4e26a00435a-nova-extra-config-0\") on node \"crc\" DevicePath \"\"" Oct 03 14:19:03 crc kubenswrapper[4861]: I1003 14:19:03.002996 4861 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bc64fc8f-fe84-485b-8d52-a4e26a00435a-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 14:19:03 crc kubenswrapper[4861]: I1003 14:19:03.003006 4861 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/bc64fc8f-fe84-485b-8d52-a4e26a00435a-nova-migration-ssh-key-1\") on node \"crc\" DevicePath \"\"" Oct 03 14:19:03 crc kubenswrapper[4861]: I1003 14:19:03.003017 4861 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/bc64fc8f-fe84-485b-8d52-a4e26a00435a-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 03 14:19:03 crc kubenswrapper[4861]: I1003 14:19:03.003027 4861 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/bc64fc8f-fe84-485b-8d52-a4e26a00435a-nova-cell1-compute-config-0\") on node \"crc\" DevicePath \"\"" Oct 03 14:19:03 crc kubenswrapper[4861]: I1003 14:19:03.003037 4861 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/bc64fc8f-fe84-485b-8d52-a4e26a00435a-nova-migration-ssh-key-0\") on node \"crc\" DevicePath \"\"" Oct 03 14:19:03 crc kubenswrapper[4861]: I1003 14:19:03.003047 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5njdp\" (UniqueName: \"kubernetes.io/projected/bc64fc8f-fe84-485b-8d52-a4e26a00435a-kube-api-access-5njdp\") on node \"crc\" DevicePath \"\"" Oct 03 14:19:03 crc kubenswrapper[4861]: I1003 14:19:03.003399 4861 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/bc64fc8f-fe84-485b-8d52-a4e26a00435a-inventory\") on node \"crc\" DevicePath \"\"" Oct 03 14:19:03 crc kubenswrapper[4861]: I1003 14:19:03.370048 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-mlf7m" event={"ID":"bc64fc8f-fe84-485b-8d52-a4e26a00435a","Type":"ContainerDied","Data":"529cf14552a3b35c771a68933ccbdf65d8fbcf083c1df4fe70285d91c024e9d6"} Oct 03 14:19:03 crc kubenswrapper[4861]: I1003 14:19:03.370535 4861 pod_container_deletor.go:80] "Container not found in pod's containers" 
containerID="529cf14552a3b35c771a68933ccbdf65d8fbcf083c1df4fe70285d91c024e9d6" Oct 03 14:19:03 crc kubenswrapper[4861]: I1003 14:19:03.370478 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-mlf7m" Oct 03 14:19:03 crc kubenswrapper[4861]: I1003 14:19:03.490022 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7pxf6"] Oct 03 14:19:03 crc kubenswrapper[4861]: E1003 14:19:03.490500 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bc64fc8f-fe84-485b-8d52-a4e26a00435a" containerName="nova-edpm-deployment-openstack-edpm-ipam" Oct 03 14:19:03 crc kubenswrapper[4861]: I1003 14:19:03.490518 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="bc64fc8f-fe84-485b-8d52-a4e26a00435a" containerName="nova-edpm-deployment-openstack-edpm-ipam" Oct 03 14:19:03 crc kubenswrapper[4861]: I1003 14:19:03.490711 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="bc64fc8f-fe84-485b-8d52-a4e26a00435a" containerName="nova-edpm-deployment-openstack-edpm-ipam" Oct 03 14:19:03 crc kubenswrapper[4861]: I1003 14:19:03.491390 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7pxf6" Oct 03 14:19:03 crc kubenswrapper[4861]: I1003 14:19:03.494413 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-compute-config-data" Oct 03 14:19:03 crc kubenswrapper[4861]: I1003 14:19:03.494726 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 03 14:19:03 crc kubenswrapper[4861]: I1003 14:19:03.496527 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-bd7xg" Oct 03 14:19:03 crc kubenswrapper[4861]: I1003 14:19:03.496708 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 03 14:19:03 crc kubenswrapper[4861]: I1003 14:19:03.497408 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 03 14:19:03 crc kubenswrapper[4861]: I1003 14:19:03.508660 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7pxf6"] Oct 03 14:19:03 crc kubenswrapper[4861]: I1003 14:19:03.613612 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/3f63691e-8f0c-4494-a774-46fe7aaba3c9-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-7pxf6\" (UID: \"3f63691e-8f0c-4494-a774-46fe7aaba3c9\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7pxf6" Oct 03 14:19:03 crc kubenswrapper[4861]: I1003 14:19:03.613912 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3f63691e-8f0c-4494-a774-46fe7aaba3c9-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-7pxf6\" (UID: \"3f63691e-8f0c-4494-a774-46fe7aaba3c9\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7pxf6" Oct 03 14:19:03 crc kubenswrapper[4861]: I1003 14:19:03.614106 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: 
\"kubernetes.io/secret/3f63691e-8f0c-4494-a774-46fe7aaba3c9-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-7pxf6\" (UID: \"3f63691e-8f0c-4494-a774-46fe7aaba3c9\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7pxf6" Oct 03 14:19:03 crc kubenswrapper[4861]: I1003 14:19:03.614330 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f63691e-8f0c-4494-a774-46fe7aaba3c9-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-7pxf6\" (UID: \"3f63691e-8f0c-4494-a774-46fe7aaba3c9\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7pxf6" Oct 03 14:19:03 crc kubenswrapper[4861]: I1003 14:19:03.614542 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/3f63691e-8f0c-4494-a774-46fe7aaba3c9-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-7pxf6\" (UID: \"3f63691e-8f0c-4494-a774-46fe7aaba3c9\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7pxf6" Oct 03 14:19:03 crc kubenswrapper[4861]: I1003 14:19:03.614615 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/3f63691e-8f0c-4494-a774-46fe7aaba3c9-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-7pxf6\" (UID: \"3f63691e-8f0c-4494-a774-46fe7aaba3c9\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7pxf6" Oct 03 14:19:03 crc kubenswrapper[4861]: I1003 14:19:03.614675 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-544mw\" (UniqueName: \"kubernetes.io/projected/3f63691e-8f0c-4494-a774-46fe7aaba3c9-kube-api-access-544mw\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-7pxf6\" (UID: \"3f63691e-8f0c-4494-a774-46fe7aaba3c9\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7pxf6" Oct 03 14:19:03 crc kubenswrapper[4861]: I1003 14:19:03.716867 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3f63691e-8f0c-4494-a774-46fe7aaba3c9-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-7pxf6\" (UID: \"3f63691e-8f0c-4494-a774-46fe7aaba3c9\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7pxf6" Oct 03 14:19:03 crc kubenswrapper[4861]: I1003 14:19:03.716921 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3f63691e-8f0c-4494-a774-46fe7aaba3c9-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-7pxf6\" (UID: \"3f63691e-8f0c-4494-a774-46fe7aaba3c9\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7pxf6" Oct 03 14:19:03 crc kubenswrapper[4861]: I1003 14:19:03.716970 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f63691e-8f0c-4494-a774-46fe7aaba3c9-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-7pxf6\" (UID: \"3f63691e-8f0c-4494-a774-46fe7aaba3c9\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7pxf6" Oct 03 14:19:03 crc kubenswrapper[4861]: I1003 14:19:03.717042 4861 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/3f63691e-8f0c-4494-a774-46fe7aaba3c9-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-7pxf6\" (UID: \"3f63691e-8f0c-4494-a774-46fe7aaba3c9\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7pxf6" Oct 03 14:19:03 crc kubenswrapper[4861]: I1003 14:19:03.717083 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/3f63691e-8f0c-4494-a774-46fe7aaba3c9-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-7pxf6\" (UID: \"3f63691e-8f0c-4494-a774-46fe7aaba3c9\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7pxf6" Oct 03 14:19:03 crc kubenswrapper[4861]: I1003 14:19:03.717116 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-544mw\" (UniqueName: \"kubernetes.io/projected/3f63691e-8f0c-4494-a774-46fe7aaba3c9-kube-api-access-544mw\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-7pxf6\" (UID: \"3f63691e-8f0c-4494-a774-46fe7aaba3c9\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7pxf6" Oct 03 14:19:03 crc kubenswrapper[4861]: I1003 14:19:03.717161 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/3f63691e-8f0c-4494-a774-46fe7aaba3c9-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-7pxf6\" (UID: \"3f63691e-8f0c-4494-a774-46fe7aaba3c9\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7pxf6" Oct 03 14:19:03 crc kubenswrapper[4861]: I1003 14:19:03.722265 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/3f63691e-8f0c-4494-a774-46fe7aaba3c9-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-7pxf6\" (UID: \"3f63691e-8f0c-4494-a774-46fe7aaba3c9\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7pxf6" Oct 03 14:19:03 crc kubenswrapper[4861]: I1003 14:19:03.724517 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3f63691e-8f0c-4494-a774-46fe7aaba3c9-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-7pxf6\" (UID: \"3f63691e-8f0c-4494-a774-46fe7aaba3c9\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7pxf6" Oct 03 14:19:03 crc kubenswrapper[4861]: I1003 14:19:03.725259 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f63691e-8f0c-4494-a774-46fe7aaba3c9-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-7pxf6\" (UID: \"3f63691e-8f0c-4494-a774-46fe7aaba3c9\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7pxf6" Oct 03 14:19:03 crc kubenswrapper[4861]: I1003 14:19:03.730512 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/3f63691e-8f0c-4494-a774-46fe7aaba3c9-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-7pxf6\" (UID: \"3f63691e-8f0c-4494-a774-46fe7aaba3c9\") " 
pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7pxf6" Oct 03 14:19:03 crc kubenswrapper[4861]: I1003 14:19:03.732487 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3f63691e-8f0c-4494-a774-46fe7aaba3c9-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-7pxf6\" (UID: \"3f63691e-8f0c-4494-a774-46fe7aaba3c9\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7pxf6" Oct 03 14:19:03 crc kubenswrapper[4861]: I1003 14:19:03.734155 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-544mw\" (UniqueName: \"kubernetes.io/projected/3f63691e-8f0c-4494-a774-46fe7aaba3c9-kube-api-access-544mw\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-7pxf6\" (UID: \"3f63691e-8f0c-4494-a774-46fe7aaba3c9\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7pxf6" Oct 03 14:19:03 crc kubenswrapper[4861]: I1003 14:19:03.735083 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/3f63691e-8f0c-4494-a774-46fe7aaba3c9-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-7pxf6\" (UID: \"3f63691e-8f0c-4494-a774-46fe7aaba3c9\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7pxf6" Oct 03 14:19:03 crc kubenswrapper[4861]: I1003 14:19:03.812713 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7pxf6" Oct 03 14:19:04 crc kubenswrapper[4861]: I1003 14:19:04.425652 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7pxf6"] Oct 03 14:19:04 crc kubenswrapper[4861]: W1003 14:19:04.428724 4861 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3f63691e_8f0c_4494_a774_46fe7aaba3c9.slice/crio-909c3a6159e5b62f239241390b2872676c00042c4c2242e8530fcb38ec38de35 WatchSource:0}: Error finding container 909c3a6159e5b62f239241390b2872676c00042c4c2242e8530fcb38ec38de35: Status 404 returned error can't find the container with id 909c3a6159e5b62f239241390b2872676c00042c4c2242e8530fcb38ec38de35 Oct 03 14:19:05 crc kubenswrapper[4861]: I1003 14:19:05.393509 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7pxf6" event={"ID":"3f63691e-8f0c-4494-a774-46fe7aaba3c9","Type":"ContainerStarted","Data":"b468b4d81948a9830b6d0f13c3a3d274c777270733c7e6b665b02a1c259987fd"} Oct 03 14:19:05 crc kubenswrapper[4861]: I1003 14:19:05.394137 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7pxf6" event={"ID":"3f63691e-8f0c-4494-a774-46fe7aaba3c9","Type":"ContainerStarted","Data":"909c3a6159e5b62f239241390b2872676c00042c4c2242e8530fcb38ec38de35"} Oct 03 14:19:05 crc kubenswrapper[4861]: I1003 14:19:05.418000 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7pxf6" podStartSLOduration=2.235073864 podStartE2EDuration="2.417983781s" podCreationTimestamp="2025-10-03 14:19:03 +0000 UTC" firstStartedPulling="2025-10-03 14:19:04.430736099 +0000 UTC m=+2858.428721146" lastFinishedPulling="2025-10-03 14:19:04.613646016 +0000 UTC m=+2858.611631063" observedRunningTime="2025-10-03 14:19:05.411282643 +0000 UTC m=+2859.409267690" 
watchObservedRunningTime="2025-10-03 14:19:05.417983781 +0000 UTC m=+2859.415968828" Oct 03 14:19:07 crc kubenswrapper[4861]: I1003 14:19:07.344487 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-dl8gr" Oct 03 14:19:07 crc kubenswrapper[4861]: I1003 14:19:07.344852 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-dl8gr" Oct 03 14:19:07 crc kubenswrapper[4861]: I1003 14:19:07.387607 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-dl8gr" Oct 03 14:19:07 crc kubenswrapper[4861]: I1003 14:19:07.458482 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-dl8gr" Oct 03 14:19:07 crc kubenswrapper[4861]: I1003 14:19:07.623483 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-dl8gr"] Oct 03 14:19:07 crc kubenswrapper[4861]: E1003 14:19:07.738803 4861 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc0980709_292e_471e_b8b5_f3e4a8e7c119.slice/crio-febef003b1775f6ae85dda1260f2596ac228c316a471514e6b0fefa4322c0b52\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc0980709_292e_471e_b8b5_f3e4a8e7c119.slice\": RecentStats: unable to find data in memory cache]" Oct 03 14:19:09 crc kubenswrapper[4861]: I1003 14:19:09.430514 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-dl8gr" podUID="b4e809cf-20ec-4e0f-986d-b1e6c0fae5c1" containerName="registry-server" containerID="cri-o://cff533e8617967bf8f6b2f86b9dfdb90ac2e1e8c4e0273cbe16e42828880f02e" gracePeriod=2 Oct 03 14:19:09 crc kubenswrapper[4861]: I1003 14:19:09.890801 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-dl8gr" Oct 03 14:19:10 crc kubenswrapper[4861]: I1003 14:19:10.047931 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7h4p9\" (UniqueName: \"kubernetes.io/projected/b4e809cf-20ec-4e0f-986d-b1e6c0fae5c1-kube-api-access-7h4p9\") pod \"b4e809cf-20ec-4e0f-986d-b1e6c0fae5c1\" (UID: \"b4e809cf-20ec-4e0f-986d-b1e6c0fae5c1\") " Oct 03 14:19:10 crc kubenswrapper[4861]: I1003 14:19:10.048000 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b4e809cf-20ec-4e0f-986d-b1e6c0fae5c1-utilities\") pod \"b4e809cf-20ec-4e0f-986d-b1e6c0fae5c1\" (UID: \"b4e809cf-20ec-4e0f-986d-b1e6c0fae5c1\") " Oct 03 14:19:10 crc kubenswrapper[4861]: I1003 14:19:10.048036 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b4e809cf-20ec-4e0f-986d-b1e6c0fae5c1-catalog-content\") pod \"b4e809cf-20ec-4e0f-986d-b1e6c0fae5c1\" (UID: \"b4e809cf-20ec-4e0f-986d-b1e6c0fae5c1\") " Oct 03 14:19:10 crc kubenswrapper[4861]: I1003 14:19:10.048823 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b4e809cf-20ec-4e0f-986d-b1e6c0fae5c1-utilities" (OuterVolumeSpecName: "utilities") pod "b4e809cf-20ec-4e0f-986d-b1e6c0fae5c1" (UID: "b4e809cf-20ec-4e0f-986d-b1e6c0fae5c1"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 14:19:10 crc kubenswrapper[4861]: I1003 14:19:10.049349 4861 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b4e809cf-20ec-4e0f-986d-b1e6c0fae5c1-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 14:19:10 crc kubenswrapper[4861]: I1003 14:19:10.053529 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b4e809cf-20ec-4e0f-986d-b1e6c0fae5c1-kube-api-access-7h4p9" (OuterVolumeSpecName: "kube-api-access-7h4p9") pod "b4e809cf-20ec-4e0f-986d-b1e6c0fae5c1" (UID: "b4e809cf-20ec-4e0f-986d-b1e6c0fae5c1"). InnerVolumeSpecName "kube-api-access-7h4p9". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 14:19:10 crc kubenswrapper[4861]: I1003 14:19:10.065003 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b4e809cf-20ec-4e0f-986d-b1e6c0fae5c1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b4e809cf-20ec-4e0f-986d-b1e6c0fae5c1" (UID: "b4e809cf-20ec-4e0f-986d-b1e6c0fae5c1"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 14:19:10 crc kubenswrapper[4861]: I1003 14:19:10.150833 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7h4p9\" (UniqueName: \"kubernetes.io/projected/b4e809cf-20ec-4e0f-986d-b1e6c0fae5c1-kube-api-access-7h4p9\") on node \"crc\" DevicePath \"\"" Oct 03 14:19:10 crc kubenswrapper[4861]: I1003 14:19:10.151136 4861 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b4e809cf-20ec-4e0f-986d-b1e6c0fae5c1-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 14:19:10 crc kubenswrapper[4861]: I1003 14:19:10.438187 4861 generic.go:334] "Generic (PLEG): container finished" podID="b4e809cf-20ec-4e0f-986d-b1e6c0fae5c1" containerID="cff533e8617967bf8f6b2f86b9dfdb90ac2e1e8c4e0273cbe16e42828880f02e" exitCode=0 Oct 03 14:19:10 crc kubenswrapper[4861]: I1003 14:19:10.438250 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-dl8gr" Oct 03 14:19:10 crc kubenswrapper[4861]: I1003 14:19:10.438222 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dl8gr" event={"ID":"b4e809cf-20ec-4e0f-986d-b1e6c0fae5c1","Type":"ContainerDied","Data":"cff533e8617967bf8f6b2f86b9dfdb90ac2e1e8c4e0273cbe16e42828880f02e"} Oct 03 14:19:10 crc kubenswrapper[4861]: I1003 14:19:10.438326 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dl8gr" event={"ID":"b4e809cf-20ec-4e0f-986d-b1e6c0fae5c1","Type":"ContainerDied","Data":"c197333071fb14f3d4a498cd2b41bf3315dd204d651a86d72575c35841df5f6d"} Oct 03 14:19:10 crc kubenswrapper[4861]: I1003 14:19:10.438345 4861 scope.go:117] "RemoveContainer" containerID="cff533e8617967bf8f6b2f86b9dfdb90ac2e1e8c4e0273cbe16e42828880f02e" Oct 03 14:19:10 crc kubenswrapper[4861]: I1003 14:19:10.460053 4861 scope.go:117] "RemoveContainer" containerID="f2a339d2e3df85da89650c170c13bf0190893755fdfbb21b4408d8b3259c255a" Oct 03 14:19:10 crc kubenswrapper[4861]: I1003 14:19:10.482798 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-dl8gr"] Oct 03 14:19:10 crc kubenswrapper[4861]: I1003 14:19:10.488907 4861 scope.go:117] "RemoveContainer" containerID="3dfe6f7fc96e1510ba32628d777f6002f0cb1a84b4ced2ba490468be43e37cb3" Oct 03 14:19:10 crc kubenswrapper[4861]: I1003 14:19:10.492508 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-dl8gr"] Oct 03 14:19:10 crc kubenswrapper[4861]: I1003 14:19:10.526045 4861 scope.go:117] "RemoveContainer" containerID="cff533e8617967bf8f6b2f86b9dfdb90ac2e1e8c4e0273cbe16e42828880f02e" Oct 03 14:19:10 crc kubenswrapper[4861]: E1003 14:19:10.526405 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cff533e8617967bf8f6b2f86b9dfdb90ac2e1e8c4e0273cbe16e42828880f02e\": container with ID starting with cff533e8617967bf8f6b2f86b9dfdb90ac2e1e8c4e0273cbe16e42828880f02e not found: ID does not exist" containerID="cff533e8617967bf8f6b2f86b9dfdb90ac2e1e8c4e0273cbe16e42828880f02e" Oct 03 14:19:10 crc kubenswrapper[4861]: I1003 14:19:10.526435 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cff533e8617967bf8f6b2f86b9dfdb90ac2e1e8c4e0273cbe16e42828880f02e"} err="failed to get container status 
\"cff533e8617967bf8f6b2f86b9dfdb90ac2e1e8c4e0273cbe16e42828880f02e\": rpc error: code = NotFound desc = could not find container \"cff533e8617967bf8f6b2f86b9dfdb90ac2e1e8c4e0273cbe16e42828880f02e\": container with ID starting with cff533e8617967bf8f6b2f86b9dfdb90ac2e1e8c4e0273cbe16e42828880f02e not found: ID does not exist" Oct 03 14:19:10 crc kubenswrapper[4861]: I1003 14:19:10.526454 4861 scope.go:117] "RemoveContainer" containerID="f2a339d2e3df85da89650c170c13bf0190893755fdfbb21b4408d8b3259c255a" Oct 03 14:19:10 crc kubenswrapper[4861]: E1003 14:19:10.526687 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f2a339d2e3df85da89650c170c13bf0190893755fdfbb21b4408d8b3259c255a\": container with ID starting with f2a339d2e3df85da89650c170c13bf0190893755fdfbb21b4408d8b3259c255a not found: ID does not exist" containerID="f2a339d2e3df85da89650c170c13bf0190893755fdfbb21b4408d8b3259c255a" Oct 03 14:19:10 crc kubenswrapper[4861]: I1003 14:19:10.526715 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f2a339d2e3df85da89650c170c13bf0190893755fdfbb21b4408d8b3259c255a"} err="failed to get container status \"f2a339d2e3df85da89650c170c13bf0190893755fdfbb21b4408d8b3259c255a\": rpc error: code = NotFound desc = could not find container \"f2a339d2e3df85da89650c170c13bf0190893755fdfbb21b4408d8b3259c255a\": container with ID starting with f2a339d2e3df85da89650c170c13bf0190893755fdfbb21b4408d8b3259c255a not found: ID does not exist" Oct 03 14:19:10 crc kubenswrapper[4861]: I1003 14:19:10.526736 4861 scope.go:117] "RemoveContainer" containerID="3dfe6f7fc96e1510ba32628d777f6002f0cb1a84b4ced2ba490468be43e37cb3" Oct 03 14:19:10 crc kubenswrapper[4861]: E1003 14:19:10.526949 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3dfe6f7fc96e1510ba32628d777f6002f0cb1a84b4ced2ba490468be43e37cb3\": container with ID starting with 3dfe6f7fc96e1510ba32628d777f6002f0cb1a84b4ced2ba490468be43e37cb3 not found: ID does not exist" containerID="3dfe6f7fc96e1510ba32628d777f6002f0cb1a84b4ced2ba490468be43e37cb3" Oct 03 14:19:10 crc kubenswrapper[4861]: I1003 14:19:10.526973 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3dfe6f7fc96e1510ba32628d777f6002f0cb1a84b4ced2ba490468be43e37cb3"} err="failed to get container status \"3dfe6f7fc96e1510ba32628d777f6002f0cb1a84b4ced2ba490468be43e37cb3\": rpc error: code = NotFound desc = could not find container \"3dfe6f7fc96e1510ba32628d777f6002f0cb1a84b4ced2ba490468be43e37cb3\": container with ID starting with 3dfe6f7fc96e1510ba32628d777f6002f0cb1a84b4ced2ba490468be43e37cb3 not found: ID does not exist" Oct 03 14:19:10 crc kubenswrapper[4861]: I1003 14:19:10.691503 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b4e809cf-20ec-4e0f-986d-b1e6c0fae5c1" path="/var/lib/kubelet/pods/b4e809cf-20ec-4e0f-986d-b1e6c0fae5c1/volumes" Oct 03 14:19:17 crc kubenswrapper[4861]: E1003 14:19:17.991019 4861 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc0980709_292e_471e_b8b5_f3e4a8e7c119.slice\": RecentStats: unable to find data in memory cache], 
[\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc0980709_292e_471e_b8b5_f3e4a8e7c119.slice/crio-febef003b1775f6ae85dda1260f2596ac228c316a471514e6b0fefa4322c0b52\": RecentStats: unable to find data in memory cache]" Oct 03 14:19:26 crc kubenswrapper[4861]: E1003 14:19:26.714683 4861 fsHandler.go:119] failed to collect filesystem stats - rootDiskErr: could not stat "/var/lib/containers/storage/overlay/ebc35b59954b55194c8b69ea6013ccfc18c5fe40498bdaee5038ff01a27dbff3/diff" to get inode usage: stat /var/lib/containers/storage/overlay/ebc35b59954b55194c8b69ea6013ccfc18c5fe40498bdaee5038ff01a27dbff3/diff: no such file or directory, extraDiskErr: Oct 03 14:19:30 crc kubenswrapper[4861]: I1003 14:19:30.145491 4861 patch_prober.go:28] interesting pod/machine-config-daemon-t9slw container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 14:19:30 crc kubenswrapper[4861]: I1003 14:19:30.145896 4861 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 14:19:30 crc kubenswrapper[4861]: I1003 14:19:30.145929 4861 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" Oct 03 14:19:30 crc kubenswrapper[4861]: I1003 14:19:30.146605 4861 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"7df9abfdf812def04452ad11556b9675c4769383aff2ceeff90ba8cf9ea0a319"} pod="openshift-machine-config-operator/machine-config-daemon-t9slw" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 03 14:19:30 crc kubenswrapper[4861]: I1003 14:19:30.146658 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" containerName="machine-config-daemon" containerID="cri-o://7df9abfdf812def04452ad11556b9675c4769383aff2ceeff90ba8cf9ea0a319" gracePeriod=600 Oct 03 14:19:30 crc kubenswrapper[4861]: I1003 14:19:30.711811 4861 generic.go:334] "Generic (PLEG): container finished" podID="d8335d3f-417e-4114-b306-a3d8f6c31348" containerID="7df9abfdf812def04452ad11556b9675c4769383aff2ceeff90ba8cf9ea0a319" exitCode=0 Oct 03 14:19:30 crc kubenswrapper[4861]: I1003 14:19:30.711868 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" event={"ID":"d8335d3f-417e-4114-b306-a3d8f6c31348","Type":"ContainerDied","Data":"7df9abfdf812def04452ad11556b9675c4769383aff2ceeff90ba8cf9ea0a319"} Oct 03 14:19:30 crc kubenswrapper[4861]: I1003 14:19:30.711909 4861 scope.go:117] "RemoveContainer" containerID="05fad3e765859f2b4ff1b56618745f3a7977daa63f6f944dae7adb4c2a767290" Oct 03 14:19:30 crc kubenswrapper[4861]: E1003 14:19:30.818962 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 14:19:31 crc kubenswrapper[4861]: I1003 14:19:31.722515 4861 scope.go:117] "RemoveContainer" containerID="7df9abfdf812def04452ad11556b9675c4769383aff2ceeff90ba8cf9ea0a319" Oct 03 14:19:31 crc kubenswrapper[4861]: E1003 14:19:31.723653 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 14:19:42 crc kubenswrapper[4861]: I1003 14:19:42.659554 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-rx8fr"] Oct 03 14:19:42 crc kubenswrapper[4861]: E1003 14:19:42.660559 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b4e809cf-20ec-4e0f-986d-b1e6c0fae5c1" containerName="registry-server" Oct 03 14:19:42 crc kubenswrapper[4861]: I1003 14:19:42.660578 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="b4e809cf-20ec-4e0f-986d-b1e6c0fae5c1" containerName="registry-server" Oct 03 14:19:42 crc kubenswrapper[4861]: E1003 14:19:42.660616 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b4e809cf-20ec-4e0f-986d-b1e6c0fae5c1" containerName="extract-content" Oct 03 14:19:42 crc kubenswrapper[4861]: I1003 14:19:42.660624 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="b4e809cf-20ec-4e0f-986d-b1e6c0fae5c1" containerName="extract-content" Oct 03 14:19:42 crc kubenswrapper[4861]: E1003 14:19:42.660641 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b4e809cf-20ec-4e0f-986d-b1e6c0fae5c1" containerName="extract-utilities" Oct 03 14:19:42 crc kubenswrapper[4861]: I1003 14:19:42.660651 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="b4e809cf-20ec-4e0f-986d-b1e6c0fae5c1" containerName="extract-utilities" Oct 03 14:19:42 crc kubenswrapper[4861]: I1003 14:19:42.660881 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="b4e809cf-20ec-4e0f-986d-b1e6c0fae5c1" containerName="registry-server" Oct 03 14:19:42 crc kubenswrapper[4861]: I1003 14:19:42.662549 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-rx8fr" Oct 03 14:19:42 crc kubenswrapper[4861]: I1003 14:19:42.693298 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-rx8fr"] Oct 03 14:19:42 crc kubenswrapper[4861]: I1003 14:19:42.798693 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bzwdk\" (UniqueName: \"kubernetes.io/projected/84a87eeb-a396-43e2-9959-ecb9ad7a2ed3-kube-api-access-bzwdk\") pod \"community-operators-rx8fr\" (UID: \"84a87eeb-a396-43e2-9959-ecb9ad7a2ed3\") " pod="openshift-marketplace/community-operators-rx8fr" Oct 03 14:19:42 crc kubenswrapper[4861]: I1003 14:19:42.798802 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/84a87eeb-a396-43e2-9959-ecb9ad7a2ed3-utilities\") pod \"community-operators-rx8fr\" (UID: \"84a87eeb-a396-43e2-9959-ecb9ad7a2ed3\") " pod="openshift-marketplace/community-operators-rx8fr" Oct 03 14:19:42 crc kubenswrapper[4861]: I1003 14:19:42.798867 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/84a87eeb-a396-43e2-9959-ecb9ad7a2ed3-catalog-content\") pod \"community-operators-rx8fr\" (UID: \"84a87eeb-a396-43e2-9959-ecb9ad7a2ed3\") " pod="openshift-marketplace/community-operators-rx8fr" Oct 03 14:19:42 crc kubenswrapper[4861]: I1003 14:19:42.900977 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bzwdk\" (UniqueName: \"kubernetes.io/projected/84a87eeb-a396-43e2-9959-ecb9ad7a2ed3-kube-api-access-bzwdk\") pod \"community-operators-rx8fr\" (UID: \"84a87eeb-a396-43e2-9959-ecb9ad7a2ed3\") " pod="openshift-marketplace/community-operators-rx8fr" Oct 03 14:19:42 crc kubenswrapper[4861]: I1003 14:19:42.901069 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/84a87eeb-a396-43e2-9959-ecb9ad7a2ed3-utilities\") pod \"community-operators-rx8fr\" (UID: \"84a87eeb-a396-43e2-9959-ecb9ad7a2ed3\") " pod="openshift-marketplace/community-operators-rx8fr" Oct 03 14:19:42 crc kubenswrapper[4861]: I1003 14:19:42.901113 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/84a87eeb-a396-43e2-9959-ecb9ad7a2ed3-catalog-content\") pod \"community-operators-rx8fr\" (UID: \"84a87eeb-a396-43e2-9959-ecb9ad7a2ed3\") " pod="openshift-marketplace/community-operators-rx8fr" Oct 03 14:19:42 crc kubenswrapper[4861]: I1003 14:19:42.901582 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/84a87eeb-a396-43e2-9959-ecb9ad7a2ed3-utilities\") pod \"community-operators-rx8fr\" (UID: \"84a87eeb-a396-43e2-9959-ecb9ad7a2ed3\") " pod="openshift-marketplace/community-operators-rx8fr" Oct 03 14:19:42 crc kubenswrapper[4861]: I1003 14:19:42.901645 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/84a87eeb-a396-43e2-9959-ecb9ad7a2ed3-catalog-content\") pod \"community-operators-rx8fr\" (UID: \"84a87eeb-a396-43e2-9959-ecb9ad7a2ed3\") " pod="openshift-marketplace/community-operators-rx8fr" Oct 03 14:19:42 crc kubenswrapper[4861]: I1003 14:19:42.923845 4861 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-bzwdk\" (UniqueName: \"kubernetes.io/projected/84a87eeb-a396-43e2-9959-ecb9ad7a2ed3-kube-api-access-bzwdk\") pod \"community-operators-rx8fr\" (UID: \"84a87eeb-a396-43e2-9959-ecb9ad7a2ed3\") " pod="openshift-marketplace/community-operators-rx8fr" Oct 03 14:19:42 crc kubenswrapper[4861]: I1003 14:19:42.990745 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-rx8fr" Oct 03 14:19:43 crc kubenswrapper[4861]: I1003 14:19:43.446424 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-rx8fr"] Oct 03 14:19:43 crc kubenswrapper[4861]: I1003 14:19:43.858071 4861 generic.go:334] "Generic (PLEG): container finished" podID="84a87eeb-a396-43e2-9959-ecb9ad7a2ed3" containerID="03f5ff68b52c357bbb9d9d6607b45abe92a0f1f31a006cba18eb7582e203a44e" exitCode=0 Oct 03 14:19:43 crc kubenswrapper[4861]: I1003 14:19:43.858168 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rx8fr" event={"ID":"84a87eeb-a396-43e2-9959-ecb9ad7a2ed3","Type":"ContainerDied","Data":"03f5ff68b52c357bbb9d9d6607b45abe92a0f1f31a006cba18eb7582e203a44e"} Oct 03 14:19:43 crc kubenswrapper[4861]: I1003 14:19:43.858602 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rx8fr" event={"ID":"84a87eeb-a396-43e2-9959-ecb9ad7a2ed3","Type":"ContainerStarted","Data":"4512ac45839520aa7792256f9e8f338437c4f58562cc5fd7462503403e8f0ebb"} Oct 03 14:19:45 crc kubenswrapper[4861]: I1003 14:19:45.681756 4861 scope.go:117] "RemoveContainer" containerID="7df9abfdf812def04452ad11556b9675c4769383aff2ceeff90ba8cf9ea0a319" Oct 03 14:19:45 crc kubenswrapper[4861]: E1003 14:19:45.682650 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 14:19:45 crc kubenswrapper[4861]: I1003 14:19:45.876194 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rx8fr" event={"ID":"84a87eeb-a396-43e2-9959-ecb9ad7a2ed3","Type":"ContainerStarted","Data":"58c4ead175adec9d270e5379e50d77bed057693d2f4f0965de0fe210a4e6026a"} Oct 03 14:19:46 crc kubenswrapper[4861]: I1003 14:19:46.886484 4861 generic.go:334] "Generic (PLEG): container finished" podID="84a87eeb-a396-43e2-9959-ecb9ad7a2ed3" containerID="58c4ead175adec9d270e5379e50d77bed057693d2f4f0965de0fe210a4e6026a" exitCode=0 Oct 03 14:19:46 crc kubenswrapper[4861]: I1003 14:19:46.886544 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rx8fr" event={"ID":"84a87eeb-a396-43e2-9959-ecb9ad7a2ed3","Type":"ContainerDied","Data":"58c4ead175adec9d270e5379e50d77bed057693d2f4f0965de0fe210a4e6026a"} Oct 03 14:19:47 crc kubenswrapper[4861]: I1003 14:19:47.898623 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rx8fr" event={"ID":"84a87eeb-a396-43e2-9959-ecb9ad7a2ed3","Type":"ContainerStarted","Data":"603886c1fa7045d8d843d06592c80712f53efc616462681a9386082683d3c600"} Oct 03 14:19:47 crc kubenswrapper[4861]: I1003 14:19:47.942028 4861 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-rx8fr" podStartSLOduration=2.238452439 podStartE2EDuration="5.942003154s" podCreationTimestamp="2025-10-03 14:19:42 +0000 UTC" firstStartedPulling="2025-10-03 14:19:43.860074859 +0000 UTC m=+2897.858059906" lastFinishedPulling="2025-10-03 14:19:47.563625574 +0000 UTC m=+2901.561610621" observedRunningTime="2025-10-03 14:19:47.93400173 +0000 UTC m=+2901.931986777" watchObservedRunningTime="2025-10-03 14:19:47.942003154 +0000 UTC m=+2901.939988201" Oct 03 14:19:49 crc kubenswrapper[4861]: I1003 14:19:49.866514 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-k24bh"] Oct 03 14:19:49 crc kubenswrapper[4861]: I1003 14:19:49.868970 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-k24bh" Oct 03 14:19:49 crc kubenswrapper[4861]: I1003 14:19:49.898172 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-k24bh"] Oct 03 14:19:49 crc kubenswrapper[4861]: I1003 14:19:49.942870 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5ae53bb5-e8b6-43e0-8432-2841f282ae1c-catalog-content\") pod \"certified-operators-k24bh\" (UID: \"5ae53bb5-e8b6-43e0-8432-2841f282ae1c\") " pod="openshift-marketplace/certified-operators-k24bh" Oct 03 14:19:49 crc kubenswrapper[4861]: I1003 14:19:49.942932 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bzbf2\" (UniqueName: \"kubernetes.io/projected/5ae53bb5-e8b6-43e0-8432-2841f282ae1c-kube-api-access-bzbf2\") pod \"certified-operators-k24bh\" (UID: \"5ae53bb5-e8b6-43e0-8432-2841f282ae1c\") " pod="openshift-marketplace/certified-operators-k24bh" Oct 03 14:19:49 crc kubenswrapper[4861]: I1003 14:19:49.943011 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5ae53bb5-e8b6-43e0-8432-2841f282ae1c-utilities\") pod \"certified-operators-k24bh\" (UID: \"5ae53bb5-e8b6-43e0-8432-2841f282ae1c\") " pod="openshift-marketplace/certified-operators-k24bh" Oct 03 14:19:50 crc kubenswrapper[4861]: I1003 14:19:50.044600 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5ae53bb5-e8b6-43e0-8432-2841f282ae1c-utilities\") pod \"certified-operators-k24bh\" (UID: \"5ae53bb5-e8b6-43e0-8432-2841f282ae1c\") " pod="openshift-marketplace/certified-operators-k24bh" Oct 03 14:19:50 crc kubenswrapper[4861]: I1003 14:19:50.044770 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5ae53bb5-e8b6-43e0-8432-2841f282ae1c-catalog-content\") pod \"certified-operators-k24bh\" (UID: \"5ae53bb5-e8b6-43e0-8432-2841f282ae1c\") " pod="openshift-marketplace/certified-operators-k24bh" Oct 03 14:19:50 crc kubenswrapper[4861]: I1003 14:19:50.044807 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bzbf2\" (UniqueName: \"kubernetes.io/projected/5ae53bb5-e8b6-43e0-8432-2841f282ae1c-kube-api-access-bzbf2\") pod \"certified-operators-k24bh\" (UID: \"5ae53bb5-e8b6-43e0-8432-2841f282ae1c\") " pod="openshift-marketplace/certified-operators-k24bh" Oct 03 
14:19:50 crc kubenswrapper[4861]: I1003 14:19:50.045127 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5ae53bb5-e8b6-43e0-8432-2841f282ae1c-utilities\") pod \"certified-operators-k24bh\" (UID: \"5ae53bb5-e8b6-43e0-8432-2841f282ae1c\") " pod="openshift-marketplace/certified-operators-k24bh" Oct 03 14:19:50 crc kubenswrapper[4861]: I1003 14:19:50.045243 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5ae53bb5-e8b6-43e0-8432-2841f282ae1c-catalog-content\") pod \"certified-operators-k24bh\" (UID: \"5ae53bb5-e8b6-43e0-8432-2841f282ae1c\") " pod="openshift-marketplace/certified-operators-k24bh" Oct 03 14:19:50 crc kubenswrapper[4861]: I1003 14:19:50.073102 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bzbf2\" (UniqueName: \"kubernetes.io/projected/5ae53bb5-e8b6-43e0-8432-2841f282ae1c-kube-api-access-bzbf2\") pod \"certified-operators-k24bh\" (UID: \"5ae53bb5-e8b6-43e0-8432-2841f282ae1c\") " pod="openshift-marketplace/certified-operators-k24bh" Oct 03 14:19:50 crc kubenswrapper[4861]: I1003 14:19:50.197294 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-k24bh" Oct 03 14:19:50 crc kubenswrapper[4861]: I1003 14:19:50.812299 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-k24bh"] Oct 03 14:19:50 crc kubenswrapper[4861]: W1003 14:19:50.815564 4861 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5ae53bb5_e8b6_43e0_8432_2841f282ae1c.slice/crio-518c1c76bc78be06f5f7ad531ab56a3615b20137fa1270f2694f747656351e32 WatchSource:0}: Error finding container 518c1c76bc78be06f5f7ad531ab56a3615b20137fa1270f2694f747656351e32: Status 404 returned error can't find the container with id 518c1c76bc78be06f5f7ad531ab56a3615b20137fa1270f2694f747656351e32 Oct 03 14:19:50 crc kubenswrapper[4861]: I1003 14:19:50.926873 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-k24bh" event={"ID":"5ae53bb5-e8b6-43e0-8432-2841f282ae1c","Type":"ContainerStarted","Data":"518c1c76bc78be06f5f7ad531ab56a3615b20137fa1270f2694f747656351e32"} Oct 03 14:19:51 crc kubenswrapper[4861]: I1003 14:19:51.936792 4861 generic.go:334] "Generic (PLEG): container finished" podID="5ae53bb5-e8b6-43e0-8432-2841f282ae1c" containerID="f9fd549a924f4613fe40ca495ddf406950e5851f0b642a27c775491630490c13" exitCode=0 Oct 03 14:19:51 crc kubenswrapper[4861]: I1003 14:19:51.937214 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-k24bh" event={"ID":"5ae53bb5-e8b6-43e0-8432-2841f282ae1c","Type":"ContainerDied","Data":"f9fd549a924f4613fe40ca495ddf406950e5851f0b642a27c775491630490c13"} Oct 03 14:19:52 crc kubenswrapper[4861]: I1003 14:19:52.991794 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-rx8fr" Oct 03 14:19:52 crc kubenswrapper[4861]: I1003 14:19:52.991841 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-rx8fr" Oct 03 14:19:53 crc kubenswrapper[4861]: I1003 14:19:53.045464 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-rx8fr" Oct 03 14:19:54 
crc kubenswrapper[4861]: I1003 14:19:54.035575 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-rx8fr" Oct 03 14:19:54 crc kubenswrapper[4861]: I1003 14:19:54.850013 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-rx8fr"] Oct 03 14:19:54 crc kubenswrapper[4861]: I1003 14:19:54.967373 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-k24bh" event={"ID":"5ae53bb5-e8b6-43e0-8432-2841f282ae1c","Type":"ContainerStarted","Data":"b0377d1804b8fae3927a69b40c2534a2aa1942527b972eb3952366b39ef89981"} Oct 03 14:19:55 crc kubenswrapper[4861]: I1003 14:19:55.977422 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-rx8fr" podUID="84a87eeb-a396-43e2-9959-ecb9ad7a2ed3" containerName="registry-server" containerID="cri-o://603886c1fa7045d8d843d06592c80712f53efc616462681a9386082683d3c600" gracePeriod=2 Oct 03 14:19:57 crc kubenswrapper[4861]: I1003 14:19:57.997066 4861 generic.go:334] "Generic (PLEG): container finished" podID="84a87eeb-a396-43e2-9959-ecb9ad7a2ed3" containerID="603886c1fa7045d8d843d06592c80712f53efc616462681a9386082683d3c600" exitCode=0 Oct 03 14:19:57 crc kubenswrapper[4861]: I1003 14:19:57.997117 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rx8fr" event={"ID":"84a87eeb-a396-43e2-9959-ecb9ad7a2ed3","Type":"ContainerDied","Data":"603886c1fa7045d8d843d06592c80712f53efc616462681a9386082683d3c600"} Oct 03 14:19:57 crc kubenswrapper[4861]: I1003 14:19:57.997484 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rx8fr" event={"ID":"84a87eeb-a396-43e2-9959-ecb9ad7a2ed3","Type":"ContainerDied","Data":"4512ac45839520aa7792256f9e8f338437c4f58562cc5fd7462503403e8f0ebb"} Oct 03 14:19:57 crc kubenswrapper[4861]: I1003 14:19:57.997504 4861 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4512ac45839520aa7792256f9e8f338437c4f58562cc5fd7462503403e8f0ebb" Oct 03 14:19:58 crc kubenswrapper[4861]: I1003 14:19:58.024697 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-rx8fr" Oct 03 14:19:58 crc kubenswrapper[4861]: I1003 14:19:58.112316 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/84a87eeb-a396-43e2-9959-ecb9ad7a2ed3-catalog-content\") pod \"84a87eeb-a396-43e2-9959-ecb9ad7a2ed3\" (UID: \"84a87eeb-a396-43e2-9959-ecb9ad7a2ed3\") " Oct 03 14:19:58 crc kubenswrapper[4861]: I1003 14:19:58.112480 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bzwdk\" (UniqueName: \"kubernetes.io/projected/84a87eeb-a396-43e2-9959-ecb9ad7a2ed3-kube-api-access-bzwdk\") pod \"84a87eeb-a396-43e2-9959-ecb9ad7a2ed3\" (UID: \"84a87eeb-a396-43e2-9959-ecb9ad7a2ed3\") " Oct 03 14:19:58 crc kubenswrapper[4861]: I1003 14:19:58.112651 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/84a87eeb-a396-43e2-9959-ecb9ad7a2ed3-utilities\") pod \"84a87eeb-a396-43e2-9959-ecb9ad7a2ed3\" (UID: \"84a87eeb-a396-43e2-9959-ecb9ad7a2ed3\") " Oct 03 14:19:58 crc kubenswrapper[4861]: I1003 14:19:58.113563 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/84a87eeb-a396-43e2-9959-ecb9ad7a2ed3-utilities" (OuterVolumeSpecName: "utilities") pod "84a87eeb-a396-43e2-9959-ecb9ad7a2ed3" (UID: "84a87eeb-a396-43e2-9959-ecb9ad7a2ed3"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 14:19:58 crc kubenswrapper[4861]: I1003 14:19:58.120211 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/84a87eeb-a396-43e2-9959-ecb9ad7a2ed3-kube-api-access-bzwdk" (OuterVolumeSpecName: "kube-api-access-bzwdk") pod "84a87eeb-a396-43e2-9959-ecb9ad7a2ed3" (UID: "84a87eeb-a396-43e2-9959-ecb9ad7a2ed3"). InnerVolumeSpecName "kube-api-access-bzwdk". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 14:19:58 crc kubenswrapper[4861]: I1003 14:19:58.178688 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/84a87eeb-a396-43e2-9959-ecb9ad7a2ed3-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "84a87eeb-a396-43e2-9959-ecb9ad7a2ed3" (UID: "84a87eeb-a396-43e2-9959-ecb9ad7a2ed3"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 14:19:58 crc kubenswrapper[4861]: I1003 14:19:58.216715 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bzwdk\" (UniqueName: \"kubernetes.io/projected/84a87eeb-a396-43e2-9959-ecb9ad7a2ed3-kube-api-access-bzwdk\") on node \"crc\" DevicePath \"\"" Oct 03 14:19:58 crc kubenswrapper[4861]: I1003 14:19:58.216749 4861 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/84a87eeb-a396-43e2-9959-ecb9ad7a2ed3-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 14:19:58 crc kubenswrapper[4861]: I1003 14:19:58.216759 4861 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/84a87eeb-a396-43e2-9959-ecb9ad7a2ed3-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 14:19:59 crc kubenswrapper[4861]: I1003 14:19:59.009419 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-rx8fr" Oct 03 14:19:59 crc kubenswrapper[4861]: I1003 14:19:59.048114 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-rx8fr"] Oct 03 14:19:59 crc kubenswrapper[4861]: I1003 14:19:59.059516 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-rx8fr"] Oct 03 14:20:00 crc kubenswrapper[4861]: I1003 14:20:00.021097 4861 generic.go:334] "Generic (PLEG): container finished" podID="5ae53bb5-e8b6-43e0-8432-2841f282ae1c" containerID="b0377d1804b8fae3927a69b40c2534a2aa1942527b972eb3952366b39ef89981" exitCode=0 Oct 03 14:20:00 crc kubenswrapper[4861]: I1003 14:20:00.021508 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-k24bh" event={"ID":"5ae53bb5-e8b6-43e0-8432-2841f282ae1c","Type":"ContainerDied","Data":"b0377d1804b8fae3927a69b40c2534a2aa1942527b972eb3952366b39ef89981"} Oct 03 14:20:00 crc kubenswrapper[4861]: I1003 14:20:00.681607 4861 scope.go:117] "RemoveContainer" containerID="7df9abfdf812def04452ad11556b9675c4769383aff2ceeff90ba8cf9ea0a319" Oct 03 14:20:00 crc kubenswrapper[4861]: E1003 14:20:00.681881 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 14:20:00 crc kubenswrapper[4861]: I1003 14:20:00.695113 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="84a87eeb-a396-43e2-9959-ecb9ad7a2ed3" path="/var/lib/kubelet/pods/84a87eeb-a396-43e2-9959-ecb9ad7a2ed3/volumes" Oct 03 14:20:02 crc kubenswrapper[4861]: I1003 14:20:02.040670 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-k24bh" event={"ID":"5ae53bb5-e8b6-43e0-8432-2841f282ae1c","Type":"ContainerStarted","Data":"1bb2d9faa56b6e3e570fa0b8bbb291bceac110fa92b3cf0366de7bf30d101b4d"} Oct 03 14:20:02 crc kubenswrapper[4861]: I1003 14:20:02.058811 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-k24bh" podStartSLOduration=3.603827968 podStartE2EDuration="13.058791556s" podCreationTimestamp="2025-10-03 14:19:49 +0000 UTC" firstStartedPulling="2025-10-03 14:19:51.939907937 +0000 UTC m=+2905.937892994" lastFinishedPulling="2025-10-03 14:20:01.394871545 +0000 UTC m=+2915.392856582" observedRunningTime="2025-10-03 14:20:02.057707437 +0000 UTC m=+2916.055692484" watchObservedRunningTime="2025-10-03 14:20:02.058791556 +0000 UTC m=+2916.056776603" Oct 03 14:20:10 crc kubenswrapper[4861]: I1003 14:20:10.198974 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-k24bh" Oct 03 14:20:10 crc kubenswrapper[4861]: I1003 14:20:10.199643 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-k24bh" Oct 03 14:20:10 crc kubenswrapper[4861]: I1003 14:20:10.247054 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-k24bh" Oct 03 14:20:11 crc kubenswrapper[4861]: I1003 14:20:11.182059 4861 kubelet.go:2542] "SyncLoop 
(probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-k24bh" Oct 03 14:20:11 crc kubenswrapper[4861]: I1003 14:20:11.237360 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-k24bh"] Oct 03 14:20:12 crc kubenswrapper[4861]: I1003 14:20:12.681272 4861 scope.go:117] "RemoveContainer" containerID="7df9abfdf812def04452ad11556b9675c4769383aff2ceeff90ba8cf9ea0a319" Oct 03 14:20:12 crc kubenswrapper[4861]: E1003 14:20:12.681790 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 14:20:13 crc kubenswrapper[4861]: I1003 14:20:13.143313 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-k24bh" podUID="5ae53bb5-e8b6-43e0-8432-2841f282ae1c" containerName="registry-server" containerID="cri-o://1bb2d9faa56b6e3e570fa0b8bbb291bceac110fa92b3cf0366de7bf30d101b4d" gracePeriod=2 Oct 03 14:20:13 crc kubenswrapper[4861]: I1003 14:20:13.675633 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-k24bh" Oct 03 14:20:13 crc kubenswrapper[4861]: I1003 14:20:13.827738 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5ae53bb5-e8b6-43e0-8432-2841f282ae1c-utilities\") pod \"5ae53bb5-e8b6-43e0-8432-2841f282ae1c\" (UID: \"5ae53bb5-e8b6-43e0-8432-2841f282ae1c\") " Oct 03 14:20:13 crc kubenswrapper[4861]: I1003 14:20:13.827860 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5ae53bb5-e8b6-43e0-8432-2841f282ae1c-catalog-content\") pod \"5ae53bb5-e8b6-43e0-8432-2841f282ae1c\" (UID: \"5ae53bb5-e8b6-43e0-8432-2841f282ae1c\") " Oct 03 14:20:13 crc kubenswrapper[4861]: I1003 14:20:13.827918 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bzbf2\" (UniqueName: \"kubernetes.io/projected/5ae53bb5-e8b6-43e0-8432-2841f282ae1c-kube-api-access-bzbf2\") pod \"5ae53bb5-e8b6-43e0-8432-2841f282ae1c\" (UID: \"5ae53bb5-e8b6-43e0-8432-2841f282ae1c\") " Oct 03 14:20:13 crc kubenswrapper[4861]: I1003 14:20:13.828858 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5ae53bb5-e8b6-43e0-8432-2841f282ae1c-utilities" (OuterVolumeSpecName: "utilities") pod "5ae53bb5-e8b6-43e0-8432-2841f282ae1c" (UID: "5ae53bb5-e8b6-43e0-8432-2841f282ae1c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 14:20:13 crc kubenswrapper[4861]: I1003 14:20:13.834167 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5ae53bb5-e8b6-43e0-8432-2841f282ae1c-kube-api-access-bzbf2" (OuterVolumeSpecName: "kube-api-access-bzbf2") pod "5ae53bb5-e8b6-43e0-8432-2841f282ae1c" (UID: "5ae53bb5-e8b6-43e0-8432-2841f282ae1c"). InnerVolumeSpecName "kube-api-access-bzbf2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 14:20:13 crc kubenswrapper[4861]: I1003 14:20:13.874941 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5ae53bb5-e8b6-43e0-8432-2841f282ae1c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5ae53bb5-e8b6-43e0-8432-2841f282ae1c" (UID: "5ae53bb5-e8b6-43e0-8432-2841f282ae1c"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 14:20:13 crc kubenswrapper[4861]: I1003 14:20:13.930337 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bzbf2\" (UniqueName: \"kubernetes.io/projected/5ae53bb5-e8b6-43e0-8432-2841f282ae1c-kube-api-access-bzbf2\") on node \"crc\" DevicePath \"\"" Oct 03 14:20:13 crc kubenswrapper[4861]: I1003 14:20:13.930364 4861 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5ae53bb5-e8b6-43e0-8432-2841f282ae1c-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 14:20:13 crc kubenswrapper[4861]: I1003 14:20:13.930374 4861 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5ae53bb5-e8b6-43e0-8432-2841f282ae1c-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 14:20:14 crc kubenswrapper[4861]: I1003 14:20:14.152896 4861 generic.go:334] "Generic (PLEG): container finished" podID="5ae53bb5-e8b6-43e0-8432-2841f282ae1c" containerID="1bb2d9faa56b6e3e570fa0b8bbb291bceac110fa92b3cf0366de7bf30d101b4d" exitCode=0 Oct 03 14:20:14 crc kubenswrapper[4861]: I1003 14:20:14.152955 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-k24bh" Oct 03 14:20:14 crc kubenswrapper[4861]: I1003 14:20:14.152980 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-k24bh" event={"ID":"5ae53bb5-e8b6-43e0-8432-2841f282ae1c","Type":"ContainerDied","Data":"1bb2d9faa56b6e3e570fa0b8bbb291bceac110fa92b3cf0366de7bf30d101b4d"} Oct 03 14:20:14 crc kubenswrapper[4861]: I1003 14:20:14.153388 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-k24bh" event={"ID":"5ae53bb5-e8b6-43e0-8432-2841f282ae1c","Type":"ContainerDied","Data":"518c1c76bc78be06f5f7ad531ab56a3615b20137fa1270f2694f747656351e32"} Oct 03 14:20:14 crc kubenswrapper[4861]: I1003 14:20:14.153415 4861 scope.go:117] "RemoveContainer" containerID="1bb2d9faa56b6e3e570fa0b8bbb291bceac110fa92b3cf0366de7bf30d101b4d" Oct 03 14:20:14 crc kubenswrapper[4861]: I1003 14:20:14.181425 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-k24bh"] Oct 03 14:20:14 crc kubenswrapper[4861]: I1003 14:20:14.194916 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-k24bh"] Oct 03 14:20:14 crc kubenswrapper[4861]: I1003 14:20:14.195328 4861 scope.go:117] "RemoveContainer" containerID="b0377d1804b8fae3927a69b40c2534a2aa1942527b972eb3952366b39ef89981" Oct 03 14:20:14 crc kubenswrapper[4861]: I1003 14:20:14.213419 4861 scope.go:117] "RemoveContainer" containerID="f9fd549a924f4613fe40ca495ddf406950e5851f0b642a27c775491630490c13" Oct 03 14:20:14 crc kubenswrapper[4861]: I1003 14:20:14.260777 4861 scope.go:117] "RemoveContainer" containerID="1bb2d9faa56b6e3e570fa0b8bbb291bceac110fa92b3cf0366de7bf30d101b4d" Oct 03 14:20:14 crc kubenswrapper[4861]: E1003 14:20:14.261190 4861 log.go:32] 
"ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1bb2d9faa56b6e3e570fa0b8bbb291bceac110fa92b3cf0366de7bf30d101b4d\": container with ID starting with 1bb2d9faa56b6e3e570fa0b8bbb291bceac110fa92b3cf0366de7bf30d101b4d not found: ID does not exist" containerID="1bb2d9faa56b6e3e570fa0b8bbb291bceac110fa92b3cf0366de7bf30d101b4d" Oct 03 14:20:14 crc kubenswrapper[4861]: I1003 14:20:14.261259 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1bb2d9faa56b6e3e570fa0b8bbb291bceac110fa92b3cf0366de7bf30d101b4d"} err="failed to get container status \"1bb2d9faa56b6e3e570fa0b8bbb291bceac110fa92b3cf0366de7bf30d101b4d\": rpc error: code = NotFound desc = could not find container \"1bb2d9faa56b6e3e570fa0b8bbb291bceac110fa92b3cf0366de7bf30d101b4d\": container with ID starting with 1bb2d9faa56b6e3e570fa0b8bbb291bceac110fa92b3cf0366de7bf30d101b4d not found: ID does not exist" Oct 03 14:20:14 crc kubenswrapper[4861]: I1003 14:20:14.261335 4861 scope.go:117] "RemoveContainer" containerID="b0377d1804b8fae3927a69b40c2534a2aa1942527b972eb3952366b39ef89981" Oct 03 14:20:14 crc kubenswrapper[4861]: E1003 14:20:14.265865 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b0377d1804b8fae3927a69b40c2534a2aa1942527b972eb3952366b39ef89981\": container with ID starting with b0377d1804b8fae3927a69b40c2534a2aa1942527b972eb3952366b39ef89981 not found: ID does not exist" containerID="b0377d1804b8fae3927a69b40c2534a2aa1942527b972eb3952366b39ef89981" Oct 03 14:20:14 crc kubenswrapper[4861]: I1003 14:20:14.265929 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b0377d1804b8fae3927a69b40c2534a2aa1942527b972eb3952366b39ef89981"} err="failed to get container status \"b0377d1804b8fae3927a69b40c2534a2aa1942527b972eb3952366b39ef89981\": rpc error: code = NotFound desc = could not find container \"b0377d1804b8fae3927a69b40c2534a2aa1942527b972eb3952366b39ef89981\": container with ID starting with b0377d1804b8fae3927a69b40c2534a2aa1942527b972eb3952366b39ef89981 not found: ID does not exist" Oct 03 14:20:14 crc kubenswrapper[4861]: I1003 14:20:14.265960 4861 scope.go:117] "RemoveContainer" containerID="f9fd549a924f4613fe40ca495ddf406950e5851f0b642a27c775491630490c13" Oct 03 14:20:14 crc kubenswrapper[4861]: E1003 14:20:14.266437 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f9fd549a924f4613fe40ca495ddf406950e5851f0b642a27c775491630490c13\": container with ID starting with f9fd549a924f4613fe40ca495ddf406950e5851f0b642a27c775491630490c13 not found: ID does not exist" containerID="f9fd549a924f4613fe40ca495ddf406950e5851f0b642a27c775491630490c13" Oct 03 14:20:14 crc kubenswrapper[4861]: I1003 14:20:14.266514 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f9fd549a924f4613fe40ca495ddf406950e5851f0b642a27c775491630490c13"} err="failed to get container status \"f9fd549a924f4613fe40ca495ddf406950e5851f0b642a27c775491630490c13\": rpc error: code = NotFound desc = could not find container \"f9fd549a924f4613fe40ca495ddf406950e5851f0b642a27c775491630490c13\": container with ID starting with f9fd549a924f4613fe40ca495ddf406950e5851f0b642a27c775491630490c13 not found: ID does not exist" Oct 03 14:20:14 crc kubenswrapper[4861]: I1003 14:20:14.695518 4861 kubelet_volumes.go:163] "Cleaned 
up orphaned pod volumes dir" podUID="5ae53bb5-e8b6-43e0-8432-2841f282ae1c" path="/var/lib/kubelet/pods/5ae53bb5-e8b6-43e0-8432-2841f282ae1c/volumes" Oct 03 14:20:24 crc kubenswrapper[4861]: I1003 14:20:24.682864 4861 scope.go:117] "RemoveContainer" containerID="7df9abfdf812def04452ad11556b9675c4769383aff2ceeff90ba8cf9ea0a319" Oct 03 14:20:24 crc kubenswrapper[4861]: E1003 14:20:24.683428 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 14:20:36 crc kubenswrapper[4861]: I1003 14:20:36.689798 4861 scope.go:117] "RemoveContainer" containerID="7df9abfdf812def04452ad11556b9675c4769383aff2ceeff90ba8cf9ea0a319" Oct 03 14:20:36 crc kubenswrapper[4861]: E1003 14:20:36.692252 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 14:20:51 crc kubenswrapper[4861]: I1003 14:20:51.681326 4861 scope.go:117] "RemoveContainer" containerID="7df9abfdf812def04452ad11556b9675c4769383aff2ceeff90ba8cf9ea0a319" Oct 03 14:20:51 crc kubenswrapper[4861]: E1003 14:20:51.682187 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 14:21:02 crc kubenswrapper[4861]: I1003 14:21:02.681439 4861 scope.go:117] "RemoveContainer" containerID="7df9abfdf812def04452ad11556b9675c4769383aff2ceeff90ba8cf9ea0a319" Oct 03 14:21:02 crc kubenswrapper[4861]: E1003 14:21:02.682500 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 14:21:14 crc kubenswrapper[4861]: I1003 14:21:14.681929 4861 scope.go:117] "RemoveContainer" containerID="7df9abfdf812def04452ad11556b9675c4769383aff2ceeff90ba8cf9ea0a319" Oct 03 14:21:14 crc kubenswrapper[4861]: E1003 14:21:14.682804 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 14:21:25 crc kubenswrapper[4861]: 
I1003 14:21:25.682301 4861 scope.go:117] "RemoveContainer" containerID="7df9abfdf812def04452ad11556b9675c4769383aff2ceeff90ba8cf9ea0a319" Oct 03 14:21:25 crc kubenswrapper[4861]: E1003 14:21:25.683144 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 14:21:40 crc kubenswrapper[4861]: I1003 14:21:40.681264 4861 scope.go:117] "RemoveContainer" containerID="7df9abfdf812def04452ad11556b9675c4769383aff2ceeff90ba8cf9ea0a319" Oct 03 14:21:40 crc kubenswrapper[4861]: E1003 14:21:40.682053 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 14:21:51 crc kubenswrapper[4861]: I1003 14:21:51.682820 4861 scope.go:117] "RemoveContainer" containerID="7df9abfdf812def04452ad11556b9675c4769383aff2ceeff90ba8cf9ea0a319" Oct 03 14:21:51 crc kubenswrapper[4861]: E1003 14:21:51.683854 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 14:22:02 crc kubenswrapper[4861]: I1003 14:22:02.683177 4861 scope.go:117] "RemoveContainer" containerID="7df9abfdf812def04452ad11556b9675c4769383aff2ceeff90ba8cf9ea0a319" Oct 03 14:22:02 crc kubenswrapper[4861]: E1003 14:22:02.684141 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 14:22:14 crc kubenswrapper[4861]: I1003 14:22:14.681696 4861 scope.go:117] "RemoveContainer" containerID="7df9abfdf812def04452ad11556b9675c4769383aff2ceeff90ba8cf9ea0a319" Oct 03 14:22:14 crc kubenswrapper[4861]: E1003 14:22:14.682564 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 14:22:28 crc kubenswrapper[4861]: I1003 14:22:28.681527 4861 scope.go:117] "RemoveContainer" containerID="7df9abfdf812def04452ad11556b9675c4769383aff2ceeff90ba8cf9ea0a319" Oct 03 14:22:28 crc kubenswrapper[4861]: E1003 
14:22:28.683918 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 14:22:39 crc kubenswrapper[4861]: I1003 14:22:39.681301 4861 scope.go:117] "RemoveContainer" containerID="7df9abfdf812def04452ad11556b9675c4769383aff2ceeff90ba8cf9ea0a319" Oct 03 14:22:39 crc kubenswrapper[4861]: E1003 14:22:39.682410 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 14:22:47 crc kubenswrapper[4861]: I1003 14:22:47.521121 4861 generic.go:334] "Generic (PLEG): container finished" podID="3f63691e-8f0c-4494-a774-46fe7aaba3c9" containerID="b468b4d81948a9830b6d0f13c3a3d274c777270733c7e6b665b02a1c259987fd" exitCode=0 Oct 03 14:22:47 crc kubenswrapper[4861]: I1003 14:22:47.521599 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7pxf6" event={"ID":"3f63691e-8f0c-4494-a774-46fe7aaba3c9","Type":"ContainerDied","Data":"b468b4d81948a9830b6d0f13c3a3d274c777270733c7e6b665b02a1c259987fd"} Oct 03 14:22:48 crc kubenswrapper[4861]: I1003 14:22:48.958723 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7pxf6" Oct 03 14:22:49 crc kubenswrapper[4861]: I1003 14:22:49.112139 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3f63691e-8f0c-4494-a774-46fe7aaba3c9-ssh-key\") pod \"3f63691e-8f0c-4494-a774-46fe7aaba3c9\" (UID: \"3f63691e-8f0c-4494-a774-46fe7aaba3c9\") " Oct 03 14:22:49 crc kubenswrapper[4861]: I1003 14:22:49.112181 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f63691e-8f0c-4494-a774-46fe7aaba3c9-telemetry-combined-ca-bundle\") pod \"3f63691e-8f0c-4494-a774-46fe7aaba3c9\" (UID: \"3f63691e-8f0c-4494-a774-46fe7aaba3c9\") " Oct 03 14:22:49 crc kubenswrapper[4861]: I1003 14:22:49.112206 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3f63691e-8f0c-4494-a774-46fe7aaba3c9-inventory\") pod \"3f63691e-8f0c-4494-a774-46fe7aaba3c9\" (UID: \"3f63691e-8f0c-4494-a774-46fe7aaba3c9\") " Oct 03 14:22:49 crc kubenswrapper[4861]: I1003 14:22:49.112288 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-544mw\" (UniqueName: \"kubernetes.io/projected/3f63691e-8f0c-4494-a774-46fe7aaba3c9-kube-api-access-544mw\") pod \"3f63691e-8f0c-4494-a774-46fe7aaba3c9\" (UID: \"3f63691e-8f0c-4494-a774-46fe7aaba3c9\") " Oct 03 14:22:49 crc kubenswrapper[4861]: I1003 14:22:49.112319 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/3f63691e-8f0c-4494-a774-46fe7aaba3c9-ceilometer-compute-config-data-2\") pod \"3f63691e-8f0c-4494-a774-46fe7aaba3c9\" (UID: \"3f63691e-8f0c-4494-a774-46fe7aaba3c9\") " Oct 03 14:22:49 crc kubenswrapper[4861]: I1003 14:22:49.112345 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/3f63691e-8f0c-4494-a774-46fe7aaba3c9-ceilometer-compute-config-data-1\") pod \"3f63691e-8f0c-4494-a774-46fe7aaba3c9\" (UID: \"3f63691e-8f0c-4494-a774-46fe7aaba3c9\") " Oct 03 14:22:49 crc kubenswrapper[4861]: I1003 14:22:49.113029 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/3f63691e-8f0c-4494-a774-46fe7aaba3c9-ceilometer-compute-config-data-0\") pod \"3f63691e-8f0c-4494-a774-46fe7aaba3c9\" (UID: \"3f63691e-8f0c-4494-a774-46fe7aaba3c9\") " Oct 03 14:22:49 crc kubenswrapper[4861]: I1003 14:22:49.118039 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3f63691e-8f0c-4494-a774-46fe7aaba3c9-kube-api-access-544mw" (OuterVolumeSpecName: "kube-api-access-544mw") pod "3f63691e-8f0c-4494-a774-46fe7aaba3c9" (UID: "3f63691e-8f0c-4494-a774-46fe7aaba3c9"). InnerVolumeSpecName "kube-api-access-544mw". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 14:22:49 crc kubenswrapper[4861]: I1003 14:22:49.118732 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3f63691e-8f0c-4494-a774-46fe7aaba3c9-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "3f63691e-8f0c-4494-a774-46fe7aaba3c9" (UID: "3f63691e-8f0c-4494-a774-46fe7aaba3c9"). 
InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 14:22:49 crc kubenswrapper[4861]: I1003 14:22:49.146948 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3f63691e-8f0c-4494-a774-46fe7aaba3c9-inventory" (OuterVolumeSpecName: "inventory") pod "3f63691e-8f0c-4494-a774-46fe7aaba3c9" (UID: "3f63691e-8f0c-4494-a774-46fe7aaba3c9"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 14:22:49 crc kubenswrapper[4861]: I1003 14:22:49.148049 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3f63691e-8f0c-4494-a774-46fe7aaba3c9-ceilometer-compute-config-data-0" (OuterVolumeSpecName: "ceilometer-compute-config-data-0") pod "3f63691e-8f0c-4494-a774-46fe7aaba3c9" (UID: "3f63691e-8f0c-4494-a774-46fe7aaba3c9"). InnerVolumeSpecName "ceilometer-compute-config-data-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 14:22:49 crc kubenswrapper[4861]: I1003 14:22:49.149059 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3f63691e-8f0c-4494-a774-46fe7aaba3c9-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "3f63691e-8f0c-4494-a774-46fe7aaba3c9" (UID: "3f63691e-8f0c-4494-a774-46fe7aaba3c9"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 14:22:49 crc kubenswrapper[4861]: I1003 14:22:49.155137 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3f63691e-8f0c-4494-a774-46fe7aaba3c9-ceilometer-compute-config-data-1" (OuterVolumeSpecName: "ceilometer-compute-config-data-1") pod "3f63691e-8f0c-4494-a774-46fe7aaba3c9" (UID: "3f63691e-8f0c-4494-a774-46fe7aaba3c9"). InnerVolumeSpecName "ceilometer-compute-config-data-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 14:22:49 crc kubenswrapper[4861]: I1003 14:22:49.157015 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3f63691e-8f0c-4494-a774-46fe7aaba3c9-ceilometer-compute-config-data-2" (OuterVolumeSpecName: "ceilometer-compute-config-data-2") pod "3f63691e-8f0c-4494-a774-46fe7aaba3c9" (UID: "3f63691e-8f0c-4494-a774-46fe7aaba3c9"). InnerVolumeSpecName "ceilometer-compute-config-data-2". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 14:22:49 crc kubenswrapper[4861]: I1003 14:22:49.215542 4861 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/3f63691e-8f0c-4494-a774-46fe7aaba3c9-ceilometer-compute-config-data-1\") on node \"crc\" DevicePath \"\"" Oct 03 14:22:49 crc kubenswrapper[4861]: I1003 14:22:49.215577 4861 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/3f63691e-8f0c-4494-a774-46fe7aaba3c9-ceilometer-compute-config-data-0\") on node \"crc\" DevicePath \"\"" Oct 03 14:22:49 crc kubenswrapper[4861]: I1003 14:22:49.215587 4861 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3f63691e-8f0c-4494-a774-46fe7aaba3c9-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 03 14:22:49 crc kubenswrapper[4861]: I1003 14:22:49.215596 4861 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f63691e-8f0c-4494-a774-46fe7aaba3c9-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 14:22:49 crc kubenswrapper[4861]: I1003 14:22:49.215607 4861 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3f63691e-8f0c-4494-a774-46fe7aaba3c9-inventory\") on node \"crc\" DevicePath \"\"" Oct 03 14:22:49 crc kubenswrapper[4861]: I1003 14:22:49.215616 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-544mw\" (UniqueName: \"kubernetes.io/projected/3f63691e-8f0c-4494-a774-46fe7aaba3c9-kube-api-access-544mw\") on node \"crc\" DevicePath \"\"" Oct 03 14:22:49 crc kubenswrapper[4861]: I1003 14:22:49.215624 4861 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/3f63691e-8f0c-4494-a774-46fe7aaba3c9-ceilometer-compute-config-data-2\") on node \"crc\" DevicePath \"\"" Oct 03 14:22:49 crc kubenswrapper[4861]: I1003 14:22:49.539547 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7pxf6" event={"ID":"3f63691e-8f0c-4494-a774-46fe7aaba3c9","Type":"ContainerDied","Data":"909c3a6159e5b62f239241390b2872676c00042c4c2242e8530fcb38ec38de35"} Oct 03 14:22:49 crc kubenswrapper[4861]: I1003 14:22:49.539596 4861 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="909c3a6159e5b62f239241390b2872676c00042c4c2242e8530fcb38ec38de35" Oct 03 14:22:49 crc kubenswrapper[4861]: I1003 14:22:49.539595 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7pxf6" Oct 03 14:22:52 crc kubenswrapper[4861]: I1003 14:22:52.681283 4861 scope.go:117] "RemoveContainer" containerID="7df9abfdf812def04452ad11556b9675c4769383aff2ceeff90ba8cf9ea0a319" Oct 03 14:22:52 crc kubenswrapper[4861]: E1003 14:22:52.682860 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 14:23:07 crc kubenswrapper[4861]: I1003 14:23:07.681859 4861 scope.go:117] "RemoveContainer" containerID="7df9abfdf812def04452ad11556b9675c4769383aff2ceeff90ba8cf9ea0a319" Oct 03 14:23:07 crc kubenswrapper[4861]: E1003 14:23:07.682805 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 14:23:19 crc kubenswrapper[4861]: I1003 14:23:19.682528 4861 scope.go:117] "RemoveContainer" containerID="7df9abfdf812def04452ad11556b9675c4769383aff2ceeff90ba8cf9ea0a319" Oct 03 14:23:19 crc kubenswrapper[4861]: E1003 14:23:19.684804 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 14:23:33 crc kubenswrapper[4861]: I1003 14:23:33.681119 4861 scope.go:117] "RemoveContainer" containerID="7df9abfdf812def04452ad11556b9675c4769383aff2ceeff90ba8cf9ea0a319" Oct 03 14:23:33 crc kubenswrapper[4861]: E1003 14:23:33.682062 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 14:23:35 crc kubenswrapper[4861]: I1003 14:23:35.381190 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/tempest-tests-tempest"] Oct 03 14:23:35 crc kubenswrapper[4861]: E1003 14:23:35.382067 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84a87eeb-a396-43e2-9959-ecb9ad7a2ed3" containerName="extract-content" Oct 03 14:23:35 crc kubenswrapper[4861]: I1003 14:23:35.382088 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="84a87eeb-a396-43e2-9959-ecb9ad7a2ed3" containerName="extract-content" Oct 03 14:23:35 crc kubenswrapper[4861]: E1003 14:23:35.382146 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ae53bb5-e8b6-43e0-8432-2841f282ae1c" containerName="registry-server" Oct 03 14:23:35 
Oct 03 14:23:35 crc kubenswrapper[4861]: I1003 14:23:35.381190 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/tempest-tests-tempest"]
Oct 03 14:23:35 crc kubenswrapper[4861]: E1003 14:23:35.382067 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84a87eeb-a396-43e2-9959-ecb9ad7a2ed3" containerName="extract-content"
Oct 03 14:23:35 crc kubenswrapper[4861]: I1003 14:23:35.382088 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="84a87eeb-a396-43e2-9959-ecb9ad7a2ed3" containerName="extract-content"
Oct 03 14:23:35 crc kubenswrapper[4861]: E1003 14:23:35.382146 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ae53bb5-e8b6-43e0-8432-2841f282ae1c" containerName="registry-server"
Oct 03 14:23:35 crc kubenswrapper[4861]: I1003 14:23:35.382157 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ae53bb5-e8b6-43e0-8432-2841f282ae1c" containerName="registry-server"
Oct 03 14:23:35 crc kubenswrapper[4861]: E1003 14:23:35.382174 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84a87eeb-a396-43e2-9959-ecb9ad7a2ed3" containerName="extract-utilities"
Oct 03 14:23:35 crc kubenswrapper[4861]: I1003 14:23:35.382186 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="84a87eeb-a396-43e2-9959-ecb9ad7a2ed3" containerName="extract-utilities"
Oct 03 14:23:35 crc kubenswrapper[4861]: E1003 14:23:35.382203 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84a87eeb-a396-43e2-9959-ecb9ad7a2ed3" containerName="registry-server"
Oct 03 14:23:35 crc kubenswrapper[4861]: I1003 14:23:35.382214 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="84a87eeb-a396-43e2-9959-ecb9ad7a2ed3" containerName="registry-server"
Oct 03 14:23:35 crc kubenswrapper[4861]: E1003 14:23:35.382313 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3f63691e-8f0c-4494-a774-46fe7aaba3c9" containerName="telemetry-edpm-deployment-openstack-edpm-ipam"
Oct 03 14:23:35 crc kubenswrapper[4861]: I1003 14:23:35.382329 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="3f63691e-8f0c-4494-a774-46fe7aaba3c9" containerName="telemetry-edpm-deployment-openstack-edpm-ipam"
Oct 03 14:23:35 crc kubenswrapper[4861]: E1003 14:23:35.382346 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ae53bb5-e8b6-43e0-8432-2841f282ae1c" containerName="extract-content"
Oct 03 14:23:35 crc kubenswrapper[4861]: I1003 14:23:35.382357 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ae53bb5-e8b6-43e0-8432-2841f282ae1c" containerName="extract-content"
Oct 03 14:23:35 crc kubenswrapper[4861]: E1003 14:23:35.382382 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ae53bb5-e8b6-43e0-8432-2841f282ae1c" containerName="extract-utilities"
Oct 03 14:23:35 crc kubenswrapper[4861]: I1003 14:23:35.382393 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ae53bb5-e8b6-43e0-8432-2841f282ae1c" containerName="extract-utilities"
Oct 03 14:23:35 crc kubenswrapper[4861]: I1003 14:23:35.382684 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="5ae53bb5-e8b6-43e0-8432-2841f282ae1c" containerName="registry-server"
Oct 03 14:23:35 crc kubenswrapper[4861]: I1003 14:23:35.382718 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="3f63691e-8f0c-4494-a774-46fe7aaba3c9" containerName="telemetry-edpm-deployment-openstack-edpm-ipam"
Oct 03 14:23:35 crc kubenswrapper[4861]: I1003 14:23:35.382748 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="84a87eeb-a396-43e2-9959-ecb9ad7a2ed3" containerName="registry-server"
Oct 03 14:23:35 crc kubenswrapper[4861]: I1003 14:23:35.385092 4861 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openstack/tempest-tests-tempest" Oct 03 14:23:35 crc kubenswrapper[4861]: I1003 14:23:35.389535 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-custom-data-s0" Oct 03 14:23:35 crc kubenswrapper[4861]: I1003 14:23:35.389770 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"test-operator-controller-priv-key" Oct 03 14:23:35 crc kubenswrapper[4861]: I1003 14:23:35.389922 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0" Oct 03 14:23:35 crc kubenswrapper[4861]: I1003 14:23:35.389934 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-829rv" Oct 03 14:23:35 crc kubenswrapper[4861]: I1003 14:23:35.391087 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"] Oct 03 14:23:35 crc kubenswrapper[4861]: I1003 14:23:35.534121 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cnqjn\" (UniqueName: \"kubernetes.io/projected/a0580466-6d38-4ad0-a84e-dcf312f06369-kube-api-access-cnqjn\") pod \"tempest-tests-tempest\" (UID: \"a0580466-6d38-4ad0-a84e-dcf312f06369\") " pod="openstack/tempest-tests-tempest" Oct 03 14:23:35 crc kubenswrapper[4861]: I1003 14:23:35.534248 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"tempest-tests-tempest\" (UID: \"a0580466-6d38-4ad0-a84e-dcf312f06369\") " pod="openstack/tempest-tests-tempest" Oct 03 14:23:35 crc kubenswrapper[4861]: I1003 14:23:35.534283 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/a0580466-6d38-4ad0-a84e-dcf312f06369-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"a0580466-6d38-4ad0-a84e-dcf312f06369\") " pod="openstack/tempest-tests-tempest" Oct 03 14:23:35 crc kubenswrapper[4861]: I1003 14:23:35.534318 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a0580466-6d38-4ad0-a84e-dcf312f06369-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"a0580466-6d38-4ad0-a84e-dcf312f06369\") " pod="openstack/tempest-tests-tempest" Oct 03 14:23:35 crc kubenswrapper[4861]: I1003 14:23:35.534410 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/a0580466-6d38-4ad0-a84e-dcf312f06369-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"a0580466-6d38-4ad0-a84e-dcf312f06369\") " pod="openstack/tempest-tests-tempest" Oct 03 14:23:35 crc kubenswrapper[4861]: I1003 14:23:35.534463 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/a0580466-6d38-4ad0-a84e-dcf312f06369-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"a0580466-6d38-4ad0-a84e-dcf312f06369\") " pod="openstack/tempest-tests-tempest" Oct 03 14:23:35 crc kubenswrapper[4861]: I1003 14:23:35.534513 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: 
\"kubernetes.io/configmap/a0580466-6d38-4ad0-a84e-dcf312f06369-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"a0580466-6d38-4ad0-a84e-dcf312f06369\") " pod="openstack/tempest-tests-tempest" Oct 03 14:23:35 crc kubenswrapper[4861]: I1003 14:23:35.534544 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/a0580466-6d38-4ad0-a84e-dcf312f06369-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"a0580466-6d38-4ad0-a84e-dcf312f06369\") " pod="openstack/tempest-tests-tempest" Oct 03 14:23:35 crc kubenswrapper[4861]: I1003 14:23:35.534587 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a0580466-6d38-4ad0-a84e-dcf312f06369-config-data\") pod \"tempest-tests-tempest\" (UID: \"a0580466-6d38-4ad0-a84e-dcf312f06369\") " pod="openstack/tempest-tests-tempest" Oct 03 14:23:35 crc kubenswrapper[4861]: I1003 14:23:35.636583 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"tempest-tests-tempest\" (UID: \"a0580466-6d38-4ad0-a84e-dcf312f06369\") " pod="openstack/tempest-tests-tempest" Oct 03 14:23:35 crc kubenswrapper[4861]: I1003 14:23:35.636987 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/a0580466-6d38-4ad0-a84e-dcf312f06369-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"a0580466-6d38-4ad0-a84e-dcf312f06369\") " pod="openstack/tempest-tests-tempest" Oct 03 14:23:35 crc kubenswrapper[4861]: I1003 14:23:35.637262 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a0580466-6d38-4ad0-a84e-dcf312f06369-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"a0580466-6d38-4ad0-a84e-dcf312f06369\") " pod="openstack/tempest-tests-tempest" Oct 03 14:23:35 crc kubenswrapper[4861]: I1003 14:23:35.637416 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/a0580466-6d38-4ad0-a84e-dcf312f06369-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"a0580466-6d38-4ad0-a84e-dcf312f06369\") " pod="openstack/tempest-tests-tempest" Oct 03 14:23:35 crc kubenswrapper[4861]: I1003 14:23:35.637102 4861 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"tempest-tests-tempest\" (UID: \"a0580466-6d38-4ad0-a84e-dcf312f06369\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/tempest-tests-tempest" Oct 03 14:23:35 crc kubenswrapper[4861]: I1003 14:23:35.637817 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/a0580466-6d38-4ad0-a84e-dcf312f06369-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"a0580466-6d38-4ad0-a84e-dcf312f06369\") " pod="openstack/tempest-tests-tempest" Oct 03 14:23:35 crc kubenswrapper[4861]: I1003 14:23:35.638080 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-certs\" (UniqueName: 
\"kubernetes.io/secret/a0580466-6d38-4ad0-a84e-dcf312f06369-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"a0580466-6d38-4ad0-a84e-dcf312f06369\") " pod="openstack/tempest-tests-tempest" Oct 03 14:23:35 crc kubenswrapper[4861]: I1003 14:23:35.638418 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/a0580466-6d38-4ad0-a84e-dcf312f06369-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"a0580466-6d38-4ad0-a84e-dcf312f06369\") " pod="openstack/tempest-tests-tempest" Oct 03 14:23:35 crc kubenswrapper[4861]: I1003 14:23:35.638691 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/a0580466-6d38-4ad0-a84e-dcf312f06369-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"a0580466-6d38-4ad0-a84e-dcf312f06369\") " pod="openstack/tempest-tests-tempest" Oct 03 14:23:35 crc kubenswrapper[4861]: I1003 14:23:35.638495 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/a0580466-6d38-4ad0-a84e-dcf312f06369-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"a0580466-6d38-4ad0-a84e-dcf312f06369\") " pod="openstack/tempest-tests-tempest" Oct 03 14:23:35 crc kubenswrapper[4861]: I1003 14:23:35.638962 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a0580466-6d38-4ad0-a84e-dcf312f06369-config-data\") pod \"tempest-tests-tempest\" (UID: \"a0580466-6d38-4ad0-a84e-dcf312f06369\") " pod="openstack/tempest-tests-tempest" Oct 03 14:23:35 crc kubenswrapper[4861]: I1003 14:23:35.639349 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/a0580466-6d38-4ad0-a84e-dcf312f06369-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"a0580466-6d38-4ad0-a84e-dcf312f06369\") " pod="openstack/tempest-tests-tempest" Oct 03 14:23:35 crc kubenswrapper[4861]: I1003 14:23:35.639793 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cnqjn\" (UniqueName: \"kubernetes.io/projected/a0580466-6d38-4ad0-a84e-dcf312f06369-kube-api-access-cnqjn\") pod \"tempest-tests-tempest\" (UID: \"a0580466-6d38-4ad0-a84e-dcf312f06369\") " pod="openstack/tempest-tests-tempest" Oct 03 14:23:35 crc kubenswrapper[4861]: I1003 14:23:35.640036 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a0580466-6d38-4ad0-a84e-dcf312f06369-config-data\") pod \"tempest-tests-tempest\" (UID: \"a0580466-6d38-4ad0-a84e-dcf312f06369\") " pod="openstack/tempest-tests-tempest" Oct 03 14:23:35 crc kubenswrapper[4861]: I1003 14:23:35.644993 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/a0580466-6d38-4ad0-a84e-dcf312f06369-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"a0580466-6d38-4ad0-a84e-dcf312f06369\") " pod="openstack/tempest-tests-tempest" Oct 03 14:23:35 crc kubenswrapper[4861]: I1003 14:23:35.645200 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/a0580466-6d38-4ad0-a84e-dcf312f06369-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"a0580466-6d38-4ad0-a84e-dcf312f06369\") " 
pod="openstack/tempest-tests-tempest" Oct 03 14:23:35 crc kubenswrapper[4861]: I1003 14:23:35.647281 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a0580466-6d38-4ad0-a84e-dcf312f06369-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"a0580466-6d38-4ad0-a84e-dcf312f06369\") " pod="openstack/tempest-tests-tempest" Oct 03 14:23:35 crc kubenswrapper[4861]: I1003 14:23:35.657103 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cnqjn\" (UniqueName: \"kubernetes.io/projected/a0580466-6d38-4ad0-a84e-dcf312f06369-kube-api-access-cnqjn\") pod \"tempest-tests-tempest\" (UID: \"a0580466-6d38-4ad0-a84e-dcf312f06369\") " pod="openstack/tempest-tests-tempest" Oct 03 14:23:35 crc kubenswrapper[4861]: I1003 14:23:35.687016 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"tempest-tests-tempest\" (UID: \"a0580466-6d38-4ad0-a84e-dcf312f06369\") " pod="openstack/tempest-tests-tempest" Oct 03 14:23:35 crc kubenswrapper[4861]: I1003 14:23:35.714817 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Oct 03 14:23:36 crc kubenswrapper[4861]: I1003 14:23:36.182683 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"] Oct 03 14:23:36 crc kubenswrapper[4861]: I1003 14:23:36.187203 4861 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 03 14:23:36 crc kubenswrapper[4861]: I1003 14:23:36.990360 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"a0580466-6d38-4ad0-a84e-dcf312f06369","Type":"ContainerStarted","Data":"3467008d8ee388ac6c2e863c527e12c297105e11a2602600f2bf2f43a4703954"} Oct 03 14:23:47 crc kubenswrapper[4861]: I1003 14:23:47.681797 4861 scope.go:117] "RemoveContainer" containerID="7df9abfdf812def04452ad11556b9675c4769383aff2ceeff90ba8cf9ea0a319" Oct 03 14:23:47 crc kubenswrapper[4861]: E1003 14:23:47.682564 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 14:23:58 crc kubenswrapper[4861]: I1003 14:23:58.681065 4861 scope.go:117] "RemoveContainer" containerID="7df9abfdf812def04452ad11556b9675c4769383aff2ceeff90ba8cf9ea0a319" Oct 03 14:23:58 crc kubenswrapper[4861]: E1003 14:23:58.682161 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 14:24:13 crc kubenswrapper[4861]: I1003 14:24:13.681602 4861 scope.go:117] "RemoveContainer" containerID="7df9abfdf812def04452ad11556b9675c4769383aff2ceeff90ba8cf9ea0a319" Oct 03 14:24:13 crc kubenswrapper[4861]: E1003 14:24:13.682489 4861 pod_workers.go:1301] "Error syncing pod, 
skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 14:24:24 crc kubenswrapper[4861]: E1003 14:24:24.968836 4861 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified" Oct 03 14:24:24 crc kubenswrapper[4861]: E1003 14:24:24.971223 4861 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:tempest-tests-tempest-tests-runner,Image:quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:test-operator-ephemeral-workdir,ReadOnly:false,MountPath:/var/lib/tempest,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-ephemeral-temporary,ReadOnly:false,MountPath:/tmp,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:false,MountPath:/etc/test_operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-logs,ReadOnly:false,MountPath:/var/lib/tempest/external_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config,ReadOnly:true,MountPath:/etc/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config,ReadOnly:true,MountPath:/var/lib/tempest/.config/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config-secret,ReadOnly:false,MountPath:/etc/openstack/secure.yaml,SubPath:secure.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ca-certs,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ssh-key,ReadOnly:false,MountPath:/var/lib/tempest/id_ecdsa,SubPath:ssh_key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-cnqjn,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42480,RunAsNonRoot:*false,ReadOnlyRootFilesystem:*false,AllowPrivilegeEscalation:*true,RunAsGroup:*42480,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectReference{Name:tempest-tests-tempest-custom-data-s0,},Optional:nil,},SecretRef:nil,},EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectRef
erence{Name:tempest-tests-tempest-env-vars-s0,},Optional:nil,},SecretRef:nil,},},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod tempest-tests-tempest_openstack(a0580466-6d38-4ad0-a84e-dcf312f06369): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Oct 03 14:24:24 crc kubenswrapper[4861]: E1003 14:24:24.972433 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tempest-tests-tempest-tests-runner\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/tempest-tests-tempest" podUID="a0580466-6d38-4ad0-a84e-dcf312f06369"
Oct 03 14:24:25 crc kubenswrapper[4861]: E1003 14:24:25.450726 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tempest-tests-tempest-tests-runner\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified\\\"\"" pod="openstack/tempest-tests-tempest" podUID="a0580466-6d38-4ad0-a84e-dcf312f06369"
Oct 03 14:24:28 crc kubenswrapper[4861]: I1003 14:24:28.681175 4861 scope.go:117] "RemoveContainer" containerID="7df9abfdf812def04452ad11556b9675c4769383aff2ceeff90ba8cf9ea0a319"
Oct 03 14:24:28 crc kubenswrapper[4861]: E1003 14:24:28.681700 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348"
Oct 03 14:24:42 crc kubenswrapper[4861]: I1003 14:24:42.252729 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0"
Oct 03 14:24:42 crc kubenswrapper[4861]: I1003 14:24:42.681492 4861 scope.go:117] "RemoveContainer" containerID="7df9abfdf812def04452ad11556b9675c4769383aff2ceeff90ba8cf9ea0a319"
Oct 03 14:24:43 crc kubenswrapper[4861]: I1003 14:24:43.603798 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" event={"ID":"d8335d3f-417e-4114-b306-a3d8f6c31348","Type":"ContainerStarted","Data":"a705db865e2b9f8f66c5b6f71ab61d8b57fb8b7e0d0df1c91ce66057efed7c07"}
Oct 03 14:24:44 crc kubenswrapper[4861]: I1003 14:24:44.613406 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"a0580466-6d38-4ad0-a84e-dcf312f06369","Type":"ContainerStarted","Data":"3afac97c25d4f292e6b4e9f0b740bdb1ac01e8671d3b175e72d784c36616e72f"}
Oct 03 14:24:44 crc kubenswrapper[4861]: I1003 14:24:44.634948 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/tempest-tests-tempest" podStartSLOduration=4.572351136 podStartE2EDuration="1m10.634931029s" podCreationTimestamp="2025-10-03 14:23:34 +0000 UTC" firstStartedPulling="2025-10-03 14:23:36.18701758 +0000 UTC m=+3130.185002627" lastFinishedPulling="2025-10-03 14:24:42.249597473 +0000 UTC m=+3196.247582520" observedRunningTime="2025-10-03 14:24:44.627901771 +0000 UTC m=+3198.625886818" watchObservedRunningTime="2025-10-03 14:24:44.634931029 +0000 UTC m=+3198.632916076"
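The latency-tracker entry above encodes a simple relation that the numbers confirm: podStartE2EDuration is watchObservedRunningTime minus podCreationTimestamp (14:24:44.634931029 - 14:23:34 = 1m10.634931029s), and podStartSLOduration excludes the image-pull window (lastFinishedPulling - firstStartedPulling, about 66.06s), leaving about 4.57s. A small check in Go, with the values copied from the entry; the subtraction rule is inferred from the numbers, so treat it as a reading aid rather than kubelet documentation:

// Verifying podStartSLOduration = E2E duration - image pull window.
package main

import (
	"fmt"
	"time"
)

func main() {
	// Monotonic offsets (m=+...) copied from the log entry.
	firstStartedPulling := 3130.185002627
	lastFinishedPulling := 3196.247582520
	e2e := 70*time.Second + 634931029*time.Nanosecond // "1m10.634931029s"

	pulling := time.Duration((lastFinishedPulling - firstStartedPulling) * float64(time.Second))
	fmt.Println("image pull window:", pulling)        // ~1m6.06s
	fmt.Println("podStartSLOduration:", e2e-pulling)  // ~4.572351136s, as logged
}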
Oct 03 14:25:59 crc kubenswrapper[4861]: I1003 14:25:59.190280 4861 scope.go:117] "RemoveContainer" containerID="58c4ead175adec9d270e5379e50d77bed057693d2f4f0965de0fe210a4e6026a"
Oct 03 14:25:59 crc kubenswrapper[4861]: I1003 14:25:59.232416 4861 scope.go:117] "RemoveContainer" containerID="03f5ff68b52c357bbb9d9d6607b45abe92a0f1f31a006cba18eb7582e203a44e"
Oct 03 14:25:59 crc kubenswrapper[4861]: I1003 14:25:59.300447 4861 scope.go:117] "RemoveContainer" containerID="603886c1fa7045d8d843d06592c80712f53efc616462681a9386082683d3c600"
Oct 03 14:27:00 crc kubenswrapper[4861]: I1003 14:27:00.144895 4861 patch_prober.go:28] interesting pod/machine-config-daemon-t9slw container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 03 14:27:00 crc kubenswrapper[4861]: I1003 14:27:00.145452 4861 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 03 14:27:30 crc kubenswrapper[4861]: I1003 14:27:30.145401 4861 patch_prober.go:28] interesting pod/machine-config-daemon-t9slw container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 03 14:27:30 crc kubenswrapper[4861]: I1003 14:27:30.145807 4861 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 03 14:28:00 crc kubenswrapper[4861]: I1003 14:28:00.145203 4861 patch_prober.go:28] interesting pod/machine-config-daemon-t9slw container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 03 14:28:00 crc kubenswrapper[4861]: I1003 14:28:00.145862 4861 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 03 14:28:00 crc kubenswrapper[4861]: I1003 14:28:00.145918 4861 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-t9slw"
Oct 03 14:28:00 crc kubenswrapper[4861]: I1003 14:28:00.146835 4861 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"a705db865e2b9f8f66c5b6f71ab61d8b57fb8b7e0d0df1c91ce66057efed7c07"} pod="openshift-machine-config-operator/machine-config-daemon-t9slw" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Oct 03 14:28:00 crc kubenswrapper[4861]: I1003 14:28:00.146906 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" containerName="machine-config-daemon" containerID="cri-o://a705db865e2b9f8f66c5b6f71ab61d8b57fb8b7e0d0df1c91ce66057efed7c07" gracePeriod=600
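This block is the liveness machinery end to end: the probe hits http://127.0.0.1:8798/health at 14:27:00, 14:27:30, and 14:28:00 (so a 30s period), and after the third consecutive connection-refused failure the kubelet declares the container unhealthy and kills it for restart. A compact illustration of that loop; not kubelet code, and the period and threshold are read off the timestamps above rather than taken from the pod spec:

// Illustrative liveness loop: probe every 30s, restart after 3 failures.
package main

import (
	"fmt"
	"net/http"
	"time"
)

func main() {
	failures := 0
	for {
		resp, err := http.Get("http://127.0.0.1:8798/health")
		ok := err == nil && resp.StatusCode == http.StatusOK
		if err == nil {
			resp.Body.Close()
		}
		if ok {
			failures = 0 // any success resets the count
		} else {
			failures++
			fmt.Println("Probe failed, consecutive failures:", failures)
			if failures >= 3 {
				fmt.Println("failed liveness probe, will be restarted")
				return // the kubelet then kills the container (gracePeriod=600 here)
			}
		}
		time.Sleep(30 * time.Second)
	}
}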
pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" containerName="machine-config-daemon" containerID="cri-o://a705db865e2b9f8f66c5b6f71ab61d8b57fb8b7e0d0df1c91ce66057efed7c07" gracePeriod=600 Oct 03 14:28:00 crc kubenswrapper[4861]: I1003 14:28:00.382955 4861 generic.go:334] "Generic (PLEG): container finished" podID="d8335d3f-417e-4114-b306-a3d8f6c31348" containerID="a705db865e2b9f8f66c5b6f71ab61d8b57fb8b7e0d0df1c91ce66057efed7c07" exitCode=0 Oct 03 14:28:00 crc kubenswrapper[4861]: I1003 14:28:00.383005 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" event={"ID":"d8335d3f-417e-4114-b306-a3d8f6c31348","Type":"ContainerDied","Data":"a705db865e2b9f8f66c5b6f71ab61d8b57fb8b7e0d0df1c91ce66057efed7c07"} Oct 03 14:28:00 crc kubenswrapper[4861]: I1003 14:28:00.383333 4861 scope.go:117] "RemoveContainer" containerID="7df9abfdf812def04452ad11556b9675c4769383aff2ceeff90ba8cf9ea0a319" Oct 03 14:28:01 crc kubenswrapper[4861]: I1003 14:28:01.394876 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" event={"ID":"d8335d3f-417e-4114-b306-a3d8f6c31348","Type":"ContainerStarted","Data":"a6f40733b4a6b246a12e714e8856dfa517e95c3d12568e4a363e1ac4f16f07b5"} Oct 03 14:28:27 crc kubenswrapper[4861]: I1003 14:28:27.141192 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-dpxn2"] Oct 03 14:28:27 crc kubenswrapper[4861]: I1003 14:28:27.143610 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-dpxn2" Oct 03 14:28:27 crc kubenswrapper[4861]: I1003 14:28:27.161822 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-dpxn2"] Oct 03 14:28:27 crc kubenswrapper[4861]: I1003 14:28:27.259539 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e0663576-2a3e-425d-b34e-448b12474f6a-utilities\") pod \"redhat-operators-dpxn2\" (UID: \"e0663576-2a3e-425d-b34e-448b12474f6a\") " pod="openshift-marketplace/redhat-operators-dpxn2" Oct 03 14:28:27 crc kubenswrapper[4861]: I1003 14:28:27.259845 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e0663576-2a3e-425d-b34e-448b12474f6a-catalog-content\") pod \"redhat-operators-dpxn2\" (UID: \"e0663576-2a3e-425d-b34e-448b12474f6a\") " pod="openshift-marketplace/redhat-operators-dpxn2" Oct 03 14:28:27 crc kubenswrapper[4861]: I1003 14:28:27.260085 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qjh5h\" (UniqueName: \"kubernetes.io/projected/e0663576-2a3e-425d-b34e-448b12474f6a-kube-api-access-qjh5h\") pod \"redhat-operators-dpxn2\" (UID: \"e0663576-2a3e-425d-b34e-448b12474f6a\") " pod="openshift-marketplace/redhat-operators-dpxn2" Oct 03 14:28:27 crc kubenswrapper[4861]: I1003 14:28:27.361774 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qjh5h\" (UniqueName: \"kubernetes.io/projected/e0663576-2a3e-425d-b34e-448b12474f6a-kube-api-access-qjh5h\") pod \"redhat-operators-dpxn2\" (UID: \"e0663576-2a3e-425d-b34e-448b12474f6a\") " pod="openshift-marketplace/redhat-operators-dpxn2" Oct 03 14:28:27 crc 
kubenswrapper[4861]: I1003 14:28:27.362157 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e0663576-2a3e-425d-b34e-448b12474f6a-utilities\") pod \"redhat-operators-dpxn2\" (UID: \"e0663576-2a3e-425d-b34e-448b12474f6a\") " pod="openshift-marketplace/redhat-operators-dpxn2" Oct 03 14:28:27 crc kubenswrapper[4861]: I1003 14:28:27.362334 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e0663576-2a3e-425d-b34e-448b12474f6a-catalog-content\") pod \"redhat-operators-dpxn2\" (UID: \"e0663576-2a3e-425d-b34e-448b12474f6a\") " pod="openshift-marketplace/redhat-operators-dpxn2" Oct 03 14:28:27 crc kubenswrapper[4861]: I1003 14:28:27.362692 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e0663576-2a3e-425d-b34e-448b12474f6a-utilities\") pod \"redhat-operators-dpxn2\" (UID: \"e0663576-2a3e-425d-b34e-448b12474f6a\") " pod="openshift-marketplace/redhat-operators-dpxn2" Oct 03 14:28:27 crc kubenswrapper[4861]: I1003 14:28:27.362887 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e0663576-2a3e-425d-b34e-448b12474f6a-catalog-content\") pod \"redhat-operators-dpxn2\" (UID: \"e0663576-2a3e-425d-b34e-448b12474f6a\") " pod="openshift-marketplace/redhat-operators-dpxn2" Oct 03 14:28:27 crc kubenswrapper[4861]: I1003 14:28:27.381096 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qjh5h\" (UniqueName: \"kubernetes.io/projected/e0663576-2a3e-425d-b34e-448b12474f6a-kube-api-access-qjh5h\") pod \"redhat-operators-dpxn2\" (UID: \"e0663576-2a3e-425d-b34e-448b12474f6a\") " pod="openshift-marketplace/redhat-operators-dpxn2" Oct 03 14:28:27 crc kubenswrapper[4861]: I1003 14:28:27.464990 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-dpxn2" Oct 03 14:28:28 crc kubenswrapper[4861]: I1003 14:28:28.698841 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-dpxn2"] Oct 03 14:28:29 crc kubenswrapper[4861]: I1003 14:28:29.652330 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dpxn2" event={"ID":"e0663576-2a3e-425d-b34e-448b12474f6a","Type":"ContainerStarted","Data":"559791f73c423258a1cb310c30636574cc54877e10af81b098a0db3a27d8ccc2"} Oct 03 14:28:29 crc kubenswrapper[4861]: I1003 14:28:29.652662 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dpxn2" event={"ID":"e0663576-2a3e-425d-b34e-448b12474f6a","Type":"ContainerStarted","Data":"3e1f3656c17b2dc2646c8dcb92db7ebc31b8892e387f287174583bd629410f69"} Oct 03 14:28:30 crc kubenswrapper[4861]: I1003 14:28:30.661160 4861 generic.go:334] "Generic (PLEG): container finished" podID="e0663576-2a3e-425d-b34e-448b12474f6a" containerID="559791f73c423258a1cb310c30636574cc54877e10af81b098a0db3a27d8ccc2" exitCode=0 Oct 03 14:28:30 crc kubenswrapper[4861]: I1003 14:28:30.661280 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dpxn2" event={"ID":"e0663576-2a3e-425d-b34e-448b12474f6a","Type":"ContainerDied","Data":"559791f73c423258a1cb310c30636574cc54877e10af81b098a0db3a27d8ccc2"} Oct 03 14:28:33 crc kubenswrapper[4861]: I1003 14:28:33.691528 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dpxn2" event={"ID":"e0663576-2a3e-425d-b34e-448b12474f6a","Type":"ContainerStarted","Data":"cb59e8a2fd7be4e5b7ff761817477902e4da93e64e82d8796c6f6c86ae4bf0c2"} Oct 03 14:28:55 crc kubenswrapper[4861]: I1003 14:28:55.899210 4861 generic.go:334] "Generic (PLEG): container finished" podID="e0663576-2a3e-425d-b34e-448b12474f6a" containerID="cb59e8a2fd7be4e5b7ff761817477902e4da93e64e82d8796c6f6c86ae4bf0c2" exitCode=0 Oct 03 14:28:55 crc kubenswrapper[4861]: I1003 14:28:55.899774 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dpxn2" event={"ID":"e0663576-2a3e-425d-b34e-448b12474f6a","Type":"ContainerDied","Data":"cb59e8a2fd7be4e5b7ff761817477902e4da93e64e82d8796c6f6c86ae4bf0c2"} Oct 03 14:28:55 crc kubenswrapper[4861]: I1003 14:28:55.904952 4861 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 03 14:28:56 crc kubenswrapper[4861]: I1003 14:28:56.910510 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dpxn2" event={"ID":"e0663576-2a3e-425d-b34e-448b12474f6a","Type":"ContainerStarted","Data":"166f5f2281a1ba2880f352a2b268b983b2216519abf28d25a7a0089f56b4a5b9"} Oct 03 14:28:56 crc kubenswrapper[4861]: I1003 14:28:56.927083 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-dpxn2" podStartSLOduration=4.228642717 podStartE2EDuration="29.927061046s" podCreationTimestamp="2025-10-03 14:28:27 +0000 UTC" firstStartedPulling="2025-10-03 14:28:30.664946002 +0000 UTC m=+3424.662931049" lastFinishedPulling="2025-10-03 14:28:56.363364331 +0000 UTC m=+3450.361349378" observedRunningTime="2025-10-03 14:28:56.925673089 +0000 UTC m=+3450.923658136" watchObservedRunningTime="2025-10-03 14:28:56.927061046 +0000 UTC m=+3450.925046093" Oct 03 14:28:57 crc kubenswrapper[4861]: I1003 14:28:57.465392 4861 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-dpxn2" Oct 03 14:28:57 crc kubenswrapper[4861]: I1003 14:28:57.465899 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-dpxn2" Oct 03 14:28:58 crc kubenswrapper[4861]: I1003 14:28:58.514412 4861 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-dpxn2" podUID="e0663576-2a3e-425d-b34e-448b12474f6a" containerName="registry-server" probeResult="failure" output=< Oct 03 14:28:58 crc kubenswrapper[4861]: timeout: failed to connect service ":50051" within 1s Oct 03 14:28:58 crc kubenswrapper[4861]: > Oct 03 14:29:05 crc kubenswrapper[4861]: I1003 14:29:05.052886 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-9qfbm"] Oct 03 14:29:05 crc kubenswrapper[4861]: I1003 14:29:05.060823 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-9qfbm" Oct 03 14:29:05 crc kubenswrapper[4861]: I1003 14:29:05.125112 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-9qfbm"] Oct 03 14:29:05 crc kubenswrapper[4861]: I1003 14:29:05.179411 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d8f54757-b5b3-4cdc-8bf9-5151ce7d8115-catalog-content\") pod \"redhat-marketplace-9qfbm\" (UID: \"d8f54757-b5b3-4cdc-8bf9-5151ce7d8115\") " pod="openshift-marketplace/redhat-marketplace-9qfbm" Oct 03 14:29:05 crc kubenswrapper[4861]: I1003 14:29:05.179800 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vf5nq\" (UniqueName: \"kubernetes.io/projected/d8f54757-b5b3-4cdc-8bf9-5151ce7d8115-kube-api-access-vf5nq\") pod \"redhat-marketplace-9qfbm\" (UID: \"d8f54757-b5b3-4cdc-8bf9-5151ce7d8115\") " pod="openshift-marketplace/redhat-marketplace-9qfbm" Oct 03 14:29:05 crc kubenswrapper[4861]: I1003 14:29:05.179827 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d8f54757-b5b3-4cdc-8bf9-5151ce7d8115-utilities\") pod \"redhat-marketplace-9qfbm\" (UID: \"d8f54757-b5b3-4cdc-8bf9-5151ce7d8115\") " pod="openshift-marketplace/redhat-marketplace-9qfbm" Oct 03 14:29:05 crc kubenswrapper[4861]: I1003 14:29:05.281519 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d8f54757-b5b3-4cdc-8bf9-5151ce7d8115-catalog-content\") pod \"redhat-marketplace-9qfbm\" (UID: \"d8f54757-b5b3-4cdc-8bf9-5151ce7d8115\") " pod="openshift-marketplace/redhat-marketplace-9qfbm" Oct 03 14:29:05 crc kubenswrapper[4861]: I1003 14:29:05.281581 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vf5nq\" (UniqueName: \"kubernetes.io/projected/d8f54757-b5b3-4cdc-8bf9-5151ce7d8115-kube-api-access-vf5nq\") pod \"redhat-marketplace-9qfbm\" (UID: \"d8f54757-b5b3-4cdc-8bf9-5151ce7d8115\") " pod="openshift-marketplace/redhat-marketplace-9qfbm" Oct 03 14:29:05 crc kubenswrapper[4861]: I1003 14:29:05.281605 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d8f54757-b5b3-4cdc-8bf9-5151ce7d8115-utilities\") pod 
\"redhat-marketplace-9qfbm\" (UID: \"d8f54757-b5b3-4cdc-8bf9-5151ce7d8115\") " pod="openshift-marketplace/redhat-marketplace-9qfbm" Oct 03 14:29:05 crc kubenswrapper[4861]: I1003 14:29:05.283108 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d8f54757-b5b3-4cdc-8bf9-5151ce7d8115-utilities\") pod \"redhat-marketplace-9qfbm\" (UID: \"d8f54757-b5b3-4cdc-8bf9-5151ce7d8115\") " pod="openshift-marketplace/redhat-marketplace-9qfbm" Oct 03 14:29:05 crc kubenswrapper[4861]: I1003 14:29:05.283194 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d8f54757-b5b3-4cdc-8bf9-5151ce7d8115-catalog-content\") pod \"redhat-marketplace-9qfbm\" (UID: \"d8f54757-b5b3-4cdc-8bf9-5151ce7d8115\") " pod="openshift-marketplace/redhat-marketplace-9qfbm" Oct 03 14:29:05 crc kubenswrapper[4861]: I1003 14:29:05.319039 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vf5nq\" (UniqueName: \"kubernetes.io/projected/d8f54757-b5b3-4cdc-8bf9-5151ce7d8115-kube-api-access-vf5nq\") pod \"redhat-marketplace-9qfbm\" (UID: \"d8f54757-b5b3-4cdc-8bf9-5151ce7d8115\") " pod="openshift-marketplace/redhat-marketplace-9qfbm" Oct 03 14:29:05 crc kubenswrapper[4861]: I1003 14:29:05.408507 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-9qfbm" Oct 03 14:29:06 crc kubenswrapper[4861]: I1003 14:29:06.060629 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-9qfbm"] Oct 03 14:29:07 crc kubenswrapper[4861]: I1003 14:29:07.002606 4861 generic.go:334] "Generic (PLEG): container finished" podID="d8f54757-b5b3-4cdc-8bf9-5151ce7d8115" containerID="44ca75c8d87ceb1ab5209282bce20af18b16957f3d3db884c7846394d6e9eea3" exitCode=0 Oct 03 14:29:07 crc kubenswrapper[4861]: I1003 14:29:07.002730 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9qfbm" event={"ID":"d8f54757-b5b3-4cdc-8bf9-5151ce7d8115","Type":"ContainerDied","Data":"44ca75c8d87ceb1ab5209282bce20af18b16957f3d3db884c7846394d6e9eea3"} Oct 03 14:29:07 crc kubenswrapper[4861]: I1003 14:29:07.003787 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9qfbm" event={"ID":"d8f54757-b5b3-4cdc-8bf9-5151ce7d8115","Type":"ContainerStarted","Data":"adb4198230a33bac4b72717b0fece91f134bcadf55f6be663647f7dba206dbdb"} Oct 03 14:29:08 crc kubenswrapper[4861]: I1003 14:29:08.516780 4861 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-dpxn2" podUID="e0663576-2a3e-425d-b34e-448b12474f6a" containerName="registry-server" probeResult="failure" output=< Oct 03 14:29:08 crc kubenswrapper[4861]: timeout: failed to connect service ":50051" within 1s Oct 03 14:29:08 crc kubenswrapper[4861]: > Oct 03 14:29:09 crc kubenswrapper[4861]: I1003 14:29:09.023808 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9qfbm" event={"ID":"d8f54757-b5b3-4cdc-8bf9-5151ce7d8115","Type":"ContainerStarted","Data":"a6c7cdf1cd2d9b4d7d9a5b59c98eceffc3e95d668848c7c999676d274b178299"} Oct 03 14:29:13 crc kubenswrapper[4861]: I1003 14:29:13.059847 4861 generic.go:334] "Generic (PLEG): container finished" podID="d8f54757-b5b3-4cdc-8bf9-5151ce7d8115" containerID="a6c7cdf1cd2d9b4d7d9a5b59c98eceffc3e95d668848c7c999676d274b178299" exitCode=0 Oct 
03 14:29:13 crc kubenswrapper[4861]: I1003 14:29:13.059917 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9qfbm" event={"ID":"d8f54757-b5b3-4cdc-8bf9-5151ce7d8115","Type":"ContainerDied","Data":"a6c7cdf1cd2d9b4d7d9a5b59c98eceffc3e95d668848c7c999676d274b178299"} Oct 03 14:29:15 crc kubenswrapper[4861]: I1003 14:29:15.083112 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9qfbm" event={"ID":"d8f54757-b5b3-4cdc-8bf9-5151ce7d8115","Type":"ContainerStarted","Data":"8b08f36b3b1622cf37207135588c8f319cc69d5478beb308db838894a455c9f7"} Oct 03 14:29:15 crc kubenswrapper[4861]: I1003 14:29:15.409306 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-9qfbm" Oct 03 14:29:15 crc kubenswrapper[4861]: I1003 14:29:15.409373 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-9qfbm" Oct 03 14:29:16 crc kubenswrapper[4861]: I1003 14:29:16.453782 4861 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-marketplace-9qfbm" podUID="d8f54757-b5b3-4cdc-8bf9-5151ce7d8115" containerName="registry-server" probeResult="failure" output=< Oct 03 14:29:16 crc kubenswrapper[4861]: timeout: failed to connect service ":50051" within 1s Oct 03 14:29:16 crc kubenswrapper[4861]: > Oct 03 14:29:18 crc kubenswrapper[4861]: I1003 14:29:18.513839 4861 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-dpxn2" podUID="e0663576-2a3e-425d-b34e-448b12474f6a" containerName="registry-server" probeResult="failure" output=< Oct 03 14:29:18 crc kubenswrapper[4861]: timeout: failed to connect service ":50051" within 1s Oct 03 14:29:18 crc kubenswrapper[4861]: > Oct 03 14:29:25 crc kubenswrapper[4861]: I1003 14:29:25.455807 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-9qfbm" Oct 03 14:29:25 crc kubenswrapper[4861]: I1003 14:29:25.497507 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-9qfbm" podStartSLOduration=13.522841744 podStartE2EDuration="20.497488263s" podCreationTimestamp="2025-10-03 14:29:05 +0000 UTC" firstStartedPulling="2025-10-03 14:29:07.006818205 +0000 UTC m=+3461.004803252" lastFinishedPulling="2025-10-03 14:29:13.981464724 +0000 UTC m=+3467.979449771" observedRunningTime="2025-10-03 14:29:15.108360792 +0000 UTC m=+3469.106345839" watchObservedRunningTime="2025-10-03 14:29:25.497488263 +0000 UTC m=+3479.495473320" Oct 03 14:29:25 crc kubenswrapper[4861]: I1003 14:29:25.524092 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-9qfbm" Oct 03 14:29:25 crc kubenswrapper[4861]: I1003 14:29:25.707085 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-9qfbm"] Oct 03 14:29:27 crc kubenswrapper[4861]: I1003 14:29:27.188833 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-9qfbm" podUID="d8f54757-b5b3-4cdc-8bf9-5151ce7d8115" containerName="registry-server" containerID="cri-o://8b08f36b3b1622cf37207135588c8f319cc69d5478beb308db838894a455c9f7" gracePeriod=2 Oct 03 14:29:27 crc kubenswrapper[4861]: I1003 14:29:27.965222 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-9qfbm" Oct 03 14:29:28 crc kubenswrapper[4861]: I1003 14:29:28.064443 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d8f54757-b5b3-4cdc-8bf9-5151ce7d8115-catalog-content\") pod \"d8f54757-b5b3-4cdc-8bf9-5151ce7d8115\" (UID: \"d8f54757-b5b3-4cdc-8bf9-5151ce7d8115\") " Oct 03 14:29:28 crc kubenswrapper[4861]: I1003 14:29:28.064632 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d8f54757-b5b3-4cdc-8bf9-5151ce7d8115-utilities\") pod \"d8f54757-b5b3-4cdc-8bf9-5151ce7d8115\" (UID: \"d8f54757-b5b3-4cdc-8bf9-5151ce7d8115\") " Oct 03 14:29:28 crc kubenswrapper[4861]: I1003 14:29:28.064735 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vf5nq\" (UniqueName: \"kubernetes.io/projected/d8f54757-b5b3-4cdc-8bf9-5151ce7d8115-kube-api-access-vf5nq\") pod \"d8f54757-b5b3-4cdc-8bf9-5151ce7d8115\" (UID: \"d8f54757-b5b3-4cdc-8bf9-5151ce7d8115\") " Oct 03 14:29:28 crc kubenswrapper[4861]: I1003 14:29:28.067004 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d8f54757-b5b3-4cdc-8bf9-5151ce7d8115-utilities" (OuterVolumeSpecName: "utilities") pod "d8f54757-b5b3-4cdc-8bf9-5151ce7d8115" (UID: "d8f54757-b5b3-4cdc-8bf9-5151ce7d8115"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 14:29:28 crc kubenswrapper[4861]: I1003 14:29:28.074391 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d8f54757-b5b3-4cdc-8bf9-5151ce7d8115-kube-api-access-vf5nq" (OuterVolumeSpecName: "kube-api-access-vf5nq") pod "d8f54757-b5b3-4cdc-8bf9-5151ce7d8115" (UID: "d8f54757-b5b3-4cdc-8bf9-5151ce7d8115"). InnerVolumeSpecName "kube-api-access-vf5nq". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 14:29:28 crc kubenswrapper[4861]: I1003 14:29:28.081098 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d8f54757-b5b3-4cdc-8bf9-5151ce7d8115-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d8f54757-b5b3-4cdc-8bf9-5151ce7d8115" (UID: "d8f54757-b5b3-4cdc-8bf9-5151ce7d8115"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 14:29:28 crc kubenswrapper[4861]: I1003 14:29:28.166308 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vf5nq\" (UniqueName: \"kubernetes.io/projected/d8f54757-b5b3-4cdc-8bf9-5151ce7d8115-kube-api-access-vf5nq\") on node \"crc\" DevicePath \"\"" Oct 03 14:29:28 crc kubenswrapper[4861]: I1003 14:29:28.166339 4861 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d8f54757-b5b3-4cdc-8bf9-5151ce7d8115-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 14:29:28 crc kubenswrapper[4861]: I1003 14:29:28.166348 4861 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d8f54757-b5b3-4cdc-8bf9-5151ce7d8115-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 14:29:28 crc kubenswrapper[4861]: I1003 14:29:28.209471 4861 generic.go:334] "Generic (PLEG): container finished" podID="d8f54757-b5b3-4cdc-8bf9-5151ce7d8115" containerID="8b08f36b3b1622cf37207135588c8f319cc69d5478beb308db838894a455c9f7" exitCode=0 Oct 03 14:29:28 crc kubenswrapper[4861]: I1003 14:29:28.209514 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9qfbm" event={"ID":"d8f54757-b5b3-4cdc-8bf9-5151ce7d8115","Type":"ContainerDied","Data":"8b08f36b3b1622cf37207135588c8f319cc69d5478beb308db838894a455c9f7"} Oct 03 14:29:28 crc kubenswrapper[4861]: I1003 14:29:28.209540 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9qfbm" event={"ID":"d8f54757-b5b3-4cdc-8bf9-5151ce7d8115","Type":"ContainerDied","Data":"adb4198230a33bac4b72717b0fece91f134bcadf55f6be663647f7dba206dbdb"} Oct 03 14:29:28 crc kubenswrapper[4861]: I1003 14:29:28.209556 4861 scope.go:117] "RemoveContainer" containerID="8b08f36b3b1622cf37207135588c8f319cc69d5478beb308db838894a455c9f7" Oct 03 14:29:28 crc kubenswrapper[4861]: I1003 14:29:28.209680 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-9qfbm" Oct 03 14:29:28 crc kubenswrapper[4861]: I1003 14:29:28.243468 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-9qfbm"] Oct 03 14:29:28 crc kubenswrapper[4861]: I1003 14:29:28.254635 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-9qfbm"] Oct 03 14:29:28 crc kubenswrapper[4861]: I1003 14:29:28.256399 4861 scope.go:117] "RemoveContainer" containerID="a6c7cdf1cd2d9b4d7d9a5b59c98eceffc3e95d668848c7c999676d274b178299" Oct 03 14:29:28 crc kubenswrapper[4861]: I1003 14:29:28.275182 4861 scope.go:117] "RemoveContainer" containerID="44ca75c8d87ceb1ab5209282bce20af18b16957f3d3db884c7846394d6e9eea3" Oct 03 14:29:28 crc kubenswrapper[4861]: I1003 14:29:28.321004 4861 scope.go:117] "RemoveContainer" containerID="8b08f36b3b1622cf37207135588c8f319cc69d5478beb308db838894a455c9f7" Oct 03 14:29:28 crc kubenswrapper[4861]: E1003 14:29:28.322052 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8b08f36b3b1622cf37207135588c8f319cc69d5478beb308db838894a455c9f7\": container with ID starting with 8b08f36b3b1622cf37207135588c8f319cc69d5478beb308db838894a455c9f7 not found: ID does not exist" containerID="8b08f36b3b1622cf37207135588c8f319cc69d5478beb308db838894a455c9f7" Oct 03 14:29:28 crc kubenswrapper[4861]: I1003 14:29:28.322085 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8b08f36b3b1622cf37207135588c8f319cc69d5478beb308db838894a455c9f7"} err="failed to get container status \"8b08f36b3b1622cf37207135588c8f319cc69d5478beb308db838894a455c9f7\": rpc error: code = NotFound desc = could not find container \"8b08f36b3b1622cf37207135588c8f319cc69d5478beb308db838894a455c9f7\": container with ID starting with 8b08f36b3b1622cf37207135588c8f319cc69d5478beb308db838894a455c9f7 not found: ID does not exist" Oct 03 14:29:28 crc kubenswrapper[4861]: I1003 14:29:28.322112 4861 scope.go:117] "RemoveContainer" containerID="a6c7cdf1cd2d9b4d7d9a5b59c98eceffc3e95d668848c7c999676d274b178299" Oct 03 14:29:28 crc kubenswrapper[4861]: E1003 14:29:28.322425 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a6c7cdf1cd2d9b4d7d9a5b59c98eceffc3e95d668848c7c999676d274b178299\": container with ID starting with a6c7cdf1cd2d9b4d7d9a5b59c98eceffc3e95d668848c7c999676d274b178299 not found: ID does not exist" containerID="a6c7cdf1cd2d9b4d7d9a5b59c98eceffc3e95d668848c7c999676d274b178299" Oct 03 14:29:28 crc kubenswrapper[4861]: I1003 14:29:28.322526 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a6c7cdf1cd2d9b4d7d9a5b59c98eceffc3e95d668848c7c999676d274b178299"} err="failed to get container status \"a6c7cdf1cd2d9b4d7d9a5b59c98eceffc3e95d668848c7c999676d274b178299\": rpc error: code = NotFound desc = could not find container \"a6c7cdf1cd2d9b4d7d9a5b59c98eceffc3e95d668848c7c999676d274b178299\": container with ID starting with a6c7cdf1cd2d9b4d7d9a5b59c98eceffc3e95d668848c7c999676d274b178299 not found: ID does not exist" Oct 03 14:29:28 crc kubenswrapper[4861]: I1003 14:29:28.322645 4861 scope.go:117] "RemoveContainer" containerID="44ca75c8d87ceb1ab5209282bce20af18b16957f3d3db884c7846394d6e9eea3" Oct 03 14:29:28 crc kubenswrapper[4861]: E1003 14:29:28.323134 4861 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"44ca75c8d87ceb1ab5209282bce20af18b16957f3d3db884c7846394d6e9eea3\": container with ID starting with 44ca75c8d87ceb1ab5209282bce20af18b16957f3d3db884c7846394d6e9eea3 not found: ID does not exist" containerID="44ca75c8d87ceb1ab5209282bce20af18b16957f3d3db884c7846394d6e9eea3" Oct 03 14:29:28 crc kubenswrapper[4861]: I1003 14:29:28.323187 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"44ca75c8d87ceb1ab5209282bce20af18b16957f3d3db884c7846394d6e9eea3"} err="failed to get container status \"44ca75c8d87ceb1ab5209282bce20af18b16957f3d3db884c7846394d6e9eea3\": rpc error: code = NotFound desc = could not find container \"44ca75c8d87ceb1ab5209282bce20af18b16957f3d3db884c7846394d6e9eea3\": container with ID starting with 44ca75c8d87ceb1ab5209282bce20af18b16957f3d3db884c7846394d6e9eea3 not found: ID does not exist" Oct 03 14:29:28 crc kubenswrapper[4861]: I1003 14:29:28.524193 4861 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-dpxn2" podUID="e0663576-2a3e-425d-b34e-448b12474f6a" containerName="registry-server" probeResult="failure" output=< Oct 03 14:29:28 crc kubenswrapper[4861]: timeout: failed to connect service ":50051" within 1s Oct 03 14:29:28 crc kubenswrapper[4861]: > Oct 03 14:29:28 crc kubenswrapper[4861]: I1003 14:29:28.694116 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d8f54757-b5b3-4cdc-8bf9-5151ce7d8115" path="/var/lib/kubelet/pods/d8f54757-b5b3-4cdc-8bf9-5151ce7d8115/volumes" Oct 03 14:29:38 crc kubenswrapper[4861]: I1003 14:29:38.518093 4861 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-dpxn2" podUID="e0663576-2a3e-425d-b34e-448b12474f6a" containerName="registry-server" probeResult="failure" output=< Oct 03 14:29:38 crc kubenswrapper[4861]: timeout: failed to connect service ":50051" within 1s Oct 03 14:29:38 crc kubenswrapper[4861]: > Oct 03 14:29:47 crc kubenswrapper[4861]: I1003 14:29:47.725819 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-62mwv"] Oct 03 14:29:47 crc kubenswrapper[4861]: E1003 14:29:47.726891 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d8f54757-b5b3-4cdc-8bf9-5151ce7d8115" containerName="extract-content" Oct 03 14:29:47 crc kubenswrapper[4861]: I1003 14:29:47.726911 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="d8f54757-b5b3-4cdc-8bf9-5151ce7d8115" containerName="extract-content" Oct 03 14:29:47 crc kubenswrapper[4861]: E1003 14:29:47.726936 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d8f54757-b5b3-4cdc-8bf9-5151ce7d8115" containerName="extract-utilities" Oct 03 14:29:47 crc kubenswrapper[4861]: I1003 14:29:47.726946 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="d8f54757-b5b3-4cdc-8bf9-5151ce7d8115" containerName="extract-utilities" Oct 03 14:29:47 crc kubenswrapper[4861]: E1003 14:29:47.726957 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d8f54757-b5b3-4cdc-8bf9-5151ce7d8115" containerName="registry-server" Oct 03 14:29:47 crc kubenswrapper[4861]: I1003 14:29:47.726964 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="d8f54757-b5b3-4cdc-8bf9-5151ce7d8115" containerName="registry-server" Oct 03 14:29:47 crc kubenswrapper[4861]: I1003 14:29:47.727258 4861 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="d8f54757-b5b3-4cdc-8bf9-5151ce7d8115" containerName="registry-server" Oct 03 14:29:47 crc kubenswrapper[4861]: I1003 14:29:47.732869 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-62mwv" Oct 03 14:29:47 crc kubenswrapper[4861]: I1003 14:29:47.740787 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-62mwv"] Oct 03 14:29:47 crc kubenswrapper[4861]: I1003 14:29:47.849355 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6f55824c-2b72-49cf-b866-c4b3668e20e2-utilities\") pod \"community-operators-62mwv\" (UID: \"6f55824c-2b72-49cf-b866-c4b3668e20e2\") " pod="openshift-marketplace/community-operators-62mwv" Oct 03 14:29:47 crc kubenswrapper[4861]: I1003 14:29:47.849480 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6f55824c-2b72-49cf-b866-c4b3668e20e2-catalog-content\") pod \"community-operators-62mwv\" (UID: \"6f55824c-2b72-49cf-b866-c4b3668e20e2\") " pod="openshift-marketplace/community-operators-62mwv" Oct 03 14:29:47 crc kubenswrapper[4861]: I1003 14:29:47.849554 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m66qt\" (UniqueName: \"kubernetes.io/projected/6f55824c-2b72-49cf-b866-c4b3668e20e2-kube-api-access-m66qt\") pod \"community-operators-62mwv\" (UID: \"6f55824c-2b72-49cf-b866-c4b3668e20e2\") " pod="openshift-marketplace/community-operators-62mwv" Oct 03 14:29:47 crc kubenswrapper[4861]: I1003 14:29:47.950887 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m66qt\" (UniqueName: \"kubernetes.io/projected/6f55824c-2b72-49cf-b866-c4b3668e20e2-kube-api-access-m66qt\") pod \"community-operators-62mwv\" (UID: \"6f55824c-2b72-49cf-b866-c4b3668e20e2\") " pod="openshift-marketplace/community-operators-62mwv" Oct 03 14:29:47 crc kubenswrapper[4861]: I1003 14:29:47.951010 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6f55824c-2b72-49cf-b866-c4b3668e20e2-utilities\") pod \"community-operators-62mwv\" (UID: \"6f55824c-2b72-49cf-b866-c4b3668e20e2\") " pod="openshift-marketplace/community-operators-62mwv" Oct 03 14:29:47 crc kubenswrapper[4861]: I1003 14:29:47.951083 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6f55824c-2b72-49cf-b866-c4b3668e20e2-catalog-content\") pod \"community-operators-62mwv\" (UID: \"6f55824c-2b72-49cf-b866-c4b3668e20e2\") " pod="openshift-marketplace/community-operators-62mwv" Oct 03 14:29:47 crc kubenswrapper[4861]: I1003 14:29:47.951516 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6f55824c-2b72-49cf-b866-c4b3668e20e2-utilities\") pod \"community-operators-62mwv\" (UID: \"6f55824c-2b72-49cf-b866-c4b3668e20e2\") " pod="openshift-marketplace/community-operators-62mwv" Oct 03 14:29:47 crc kubenswrapper[4861]: I1003 14:29:47.951560 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6f55824c-2b72-49cf-b866-c4b3668e20e2-catalog-content\") pod \"community-operators-62mwv\" (UID: 
\"6f55824c-2b72-49cf-b866-c4b3668e20e2\") " pod="openshift-marketplace/community-operators-62mwv" Oct 03 14:29:47 crc kubenswrapper[4861]: I1003 14:29:47.983481 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m66qt\" (UniqueName: \"kubernetes.io/projected/6f55824c-2b72-49cf-b866-c4b3668e20e2-kube-api-access-m66qt\") pod \"community-operators-62mwv\" (UID: \"6f55824c-2b72-49cf-b866-c4b3668e20e2\") " pod="openshift-marketplace/community-operators-62mwv" Oct 03 14:29:48 crc kubenswrapper[4861]: I1003 14:29:48.057934 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-62mwv" Oct 03 14:29:48 crc kubenswrapper[4861]: I1003 14:29:48.519689 4861 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-dpxn2" podUID="e0663576-2a3e-425d-b34e-448b12474f6a" containerName="registry-server" probeResult="failure" output=< Oct 03 14:29:48 crc kubenswrapper[4861]: timeout: failed to connect service ":50051" within 1s Oct 03 14:29:48 crc kubenswrapper[4861]: > Oct 03 14:29:48 crc kubenswrapper[4861]: I1003 14:29:48.724209 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-62mwv"] Oct 03 14:29:49 crc kubenswrapper[4861]: I1003 14:29:49.443577 4861 generic.go:334] "Generic (PLEG): container finished" podID="6f55824c-2b72-49cf-b866-c4b3668e20e2" containerID="22f027ed9eb0700b811761930824ca369cbb3be24f4d87243ccaf161d902815b" exitCode=0 Oct 03 14:29:49 crc kubenswrapper[4861]: I1003 14:29:49.443621 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-62mwv" event={"ID":"6f55824c-2b72-49cf-b866-c4b3668e20e2","Type":"ContainerDied","Data":"22f027ed9eb0700b811761930824ca369cbb3be24f4d87243ccaf161d902815b"} Oct 03 14:29:49 crc kubenswrapper[4861]: I1003 14:29:49.443849 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-62mwv" event={"ID":"6f55824c-2b72-49cf-b866-c4b3668e20e2","Type":"ContainerStarted","Data":"7ba0cc499b4b823d8601667ff0a50f16ec7d97457f4efafb0f4bf1e709456279"} Oct 03 14:29:56 crc kubenswrapper[4861]: I1003 14:29:56.504707 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-62mwv" event={"ID":"6f55824c-2b72-49cf-b866-c4b3668e20e2","Type":"ContainerStarted","Data":"3e4a48bd996087135891dec490bd52968e3c581e409cd2e830cc44740045b9b2"} Oct 03 14:29:58 crc kubenswrapper[4861]: I1003 14:29:58.514312 4861 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-dpxn2" podUID="e0663576-2a3e-425d-b34e-448b12474f6a" containerName="registry-server" probeResult="failure" output=< Oct 03 14:29:58 crc kubenswrapper[4861]: timeout: failed to connect service ":50051" within 1s Oct 03 14:29:58 crc kubenswrapper[4861]: > Oct 03 14:30:00 crc kubenswrapper[4861]: I1003 14:30:00.180665 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29325030-7wlmw"] Oct 03 14:30:00 crc kubenswrapper[4861]: I1003 14:30:00.182268 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29325030-7wlmw" Oct 03 14:30:00 crc kubenswrapper[4861]: I1003 14:30:00.184594 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 03 14:30:00 crc kubenswrapper[4861]: I1003 14:30:00.184903 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 03 14:30:00 crc kubenswrapper[4861]: I1003 14:30:00.197827 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29325030-7wlmw"] Oct 03 14:30:00 crc kubenswrapper[4861]: I1003 14:30:00.289674 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fj77c\" (UniqueName: \"kubernetes.io/projected/b7864b04-4056-48ca-bfc7-cebbcb70128f-kube-api-access-fj77c\") pod \"collect-profiles-29325030-7wlmw\" (UID: \"b7864b04-4056-48ca-bfc7-cebbcb70128f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325030-7wlmw" Oct 03 14:30:00 crc kubenswrapper[4861]: I1003 14:30:00.290291 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b7864b04-4056-48ca-bfc7-cebbcb70128f-secret-volume\") pod \"collect-profiles-29325030-7wlmw\" (UID: \"b7864b04-4056-48ca-bfc7-cebbcb70128f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325030-7wlmw" Oct 03 14:30:00 crc kubenswrapper[4861]: I1003 14:30:00.290366 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b7864b04-4056-48ca-bfc7-cebbcb70128f-config-volume\") pod \"collect-profiles-29325030-7wlmw\" (UID: \"b7864b04-4056-48ca-bfc7-cebbcb70128f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325030-7wlmw" Oct 03 14:30:00 crc kubenswrapper[4861]: I1003 14:30:00.392002 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fj77c\" (UniqueName: \"kubernetes.io/projected/b7864b04-4056-48ca-bfc7-cebbcb70128f-kube-api-access-fj77c\") pod \"collect-profiles-29325030-7wlmw\" (UID: \"b7864b04-4056-48ca-bfc7-cebbcb70128f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325030-7wlmw" Oct 03 14:30:00 crc kubenswrapper[4861]: I1003 14:30:00.392093 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b7864b04-4056-48ca-bfc7-cebbcb70128f-secret-volume\") pod \"collect-profiles-29325030-7wlmw\" (UID: \"b7864b04-4056-48ca-bfc7-cebbcb70128f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325030-7wlmw" Oct 03 14:30:00 crc kubenswrapper[4861]: I1003 14:30:00.392173 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b7864b04-4056-48ca-bfc7-cebbcb70128f-config-volume\") pod \"collect-profiles-29325030-7wlmw\" (UID: \"b7864b04-4056-48ca-bfc7-cebbcb70128f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325030-7wlmw" Oct 03 14:30:00 crc kubenswrapper[4861]: I1003 14:30:00.393115 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b7864b04-4056-48ca-bfc7-cebbcb70128f-config-volume\") pod 
\"collect-profiles-29325030-7wlmw\" (UID: \"b7864b04-4056-48ca-bfc7-cebbcb70128f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325030-7wlmw" Oct 03 14:30:00 crc kubenswrapper[4861]: I1003 14:30:00.397831 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b7864b04-4056-48ca-bfc7-cebbcb70128f-secret-volume\") pod \"collect-profiles-29325030-7wlmw\" (UID: \"b7864b04-4056-48ca-bfc7-cebbcb70128f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325030-7wlmw" Oct 03 14:30:00 crc kubenswrapper[4861]: I1003 14:30:00.411313 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fj77c\" (UniqueName: \"kubernetes.io/projected/b7864b04-4056-48ca-bfc7-cebbcb70128f-kube-api-access-fj77c\") pod \"collect-profiles-29325030-7wlmw\" (UID: \"b7864b04-4056-48ca-bfc7-cebbcb70128f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325030-7wlmw" Oct 03 14:30:00 crc kubenswrapper[4861]: I1003 14:30:00.503099 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29325030-7wlmw" Oct 03 14:30:01 crc kubenswrapper[4861]: I1003 14:30:01.296865 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29325030-7wlmw"] Oct 03 14:30:01 crc kubenswrapper[4861]: I1003 14:30:01.552259 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29325030-7wlmw" event={"ID":"b7864b04-4056-48ca-bfc7-cebbcb70128f","Type":"ContainerStarted","Data":"d21c00e64a04d09f2d722eada767515fea1722ce08ec73d7948fdafc28025d46"} Oct 03 14:30:03 crc kubenswrapper[4861]: I1003 14:30:03.572332 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29325030-7wlmw" event={"ID":"b7864b04-4056-48ca-bfc7-cebbcb70128f","Type":"ContainerStarted","Data":"6e24f0adcb462c706aceee469cc25c4cf094451ddcfaf8d4fcf4f95595f010b3"} Oct 03 14:30:03 crc kubenswrapper[4861]: I1003 14:30:03.599079 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29325030-7wlmw" podStartSLOduration=3.599046917 podStartE2EDuration="3.599046917s" podCreationTimestamp="2025-10-03 14:30:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 14:30:03.588637308 +0000 UTC m=+3517.586622415" watchObservedRunningTime="2025-10-03 14:30:03.599046917 +0000 UTC m=+3517.597032004" Oct 03 14:30:04 crc kubenswrapper[4861]: E1003 14:30:04.604137 4861 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb7864b04_4056_48ca_bfc7_cebbcb70128f.slice/crio-6e24f0adcb462c706aceee469cc25c4cf094451ddcfaf8d4fcf4f95595f010b3.scope\": RecentStats: unable to find data in memory cache]" Oct 03 14:30:05 crc kubenswrapper[4861]: I1003 14:30:05.592041 4861 generic.go:334] "Generic (PLEG): container finished" podID="b7864b04-4056-48ca-bfc7-cebbcb70128f" containerID="6e24f0adcb462c706aceee469cc25c4cf094451ddcfaf8d4fcf4f95595f010b3" exitCode=0 Oct 03 14:30:05 crc kubenswrapper[4861]: I1003 14:30:05.592135 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-operator-lifecycle-manager/collect-profiles-29325030-7wlmw" event={"ID":"b7864b04-4056-48ca-bfc7-cebbcb70128f","Type":"ContainerDied","Data":"6e24f0adcb462c706aceee469cc25c4cf094451ddcfaf8d4fcf4f95595f010b3"} Oct 03 14:30:05 crc kubenswrapper[4861]: I1003 14:30:05.595643 4861 generic.go:334] "Generic (PLEG): container finished" podID="6f55824c-2b72-49cf-b866-c4b3668e20e2" containerID="3e4a48bd996087135891dec490bd52968e3c581e409cd2e830cc44740045b9b2" exitCode=0 Oct 03 14:30:05 crc kubenswrapper[4861]: I1003 14:30:05.595676 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-62mwv" event={"ID":"6f55824c-2b72-49cf-b866-c4b3668e20e2","Type":"ContainerDied","Data":"3e4a48bd996087135891dec490bd52968e3c581e409cd2e830cc44740045b9b2"} Oct 03 14:30:07 crc kubenswrapper[4861]: I1003 14:30:07.307021 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29325030-7wlmw" Oct 03 14:30:07 crc kubenswrapper[4861]: I1003 14:30:07.436602 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b7864b04-4056-48ca-bfc7-cebbcb70128f-secret-volume\") pod \"b7864b04-4056-48ca-bfc7-cebbcb70128f\" (UID: \"b7864b04-4056-48ca-bfc7-cebbcb70128f\") " Oct 03 14:30:07 crc kubenswrapper[4861]: I1003 14:30:07.436669 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b7864b04-4056-48ca-bfc7-cebbcb70128f-config-volume\") pod \"b7864b04-4056-48ca-bfc7-cebbcb70128f\" (UID: \"b7864b04-4056-48ca-bfc7-cebbcb70128f\") " Oct 03 14:30:07 crc kubenswrapper[4861]: I1003 14:30:07.436718 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fj77c\" (UniqueName: \"kubernetes.io/projected/b7864b04-4056-48ca-bfc7-cebbcb70128f-kube-api-access-fj77c\") pod \"b7864b04-4056-48ca-bfc7-cebbcb70128f\" (UID: \"b7864b04-4056-48ca-bfc7-cebbcb70128f\") " Oct 03 14:30:07 crc kubenswrapper[4861]: I1003 14:30:07.437525 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b7864b04-4056-48ca-bfc7-cebbcb70128f-config-volume" (OuterVolumeSpecName: "config-volume") pod "b7864b04-4056-48ca-bfc7-cebbcb70128f" (UID: "b7864b04-4056-48ca-bfc7-cebbcb70128f"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 14:30:07 crc kubenswrapper[4861]: I1003 14:30:07.438521 4861 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b7864b04-4056-48ca-bfc7-cebbcb70128f-config-volume\") on node \"crc\" DevicePath \"\"" Oct 03 14:30:07 crc kubenswrapper[4861]: I1003 14:30:07.444159 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b7864b04-4056-48ca-bfc7-cebbcb70128f-kube-api-access-fj77c" (OuterVolumeSpecName: "kube-api-access-fj77c") pod "b7864b04-4056-48ca-bfc7-cebbcb70128f" (UID: "b7864b04-4056-48ca-bfc7-cebbcb70128f"). InnerVolumeSpecName "kube-api-access-fj77c". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 14:30:07 crc kubenswrapper[4861]: I1003 14:30:07.457025 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b7864b04-4056-48ca-bfc7-cebbcb70128f-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "b7864b04-4056-48ca-bfc7-cebbcb70128f" (UID: "b7864b04-4056-48ca-bfc7-cebbcb70128f"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 14:30:07 crc kubenswrapper[4861]: I1003 14:30:07.541199 4861 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b7864b04-4056-48ca-bfc7-cebbcb70128f-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 03 14:30:07 crc kubenswrapper[4861]: I1003 14:30:07.541264 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fj77c\" (UniqueName: \"kubernetes.io/projected/b7864b04-4056-48ca-bfc7-cebbcb70128f-kube-api-access-fj77c\") on node \"crc\" DevicePath \"\"" Oct 03 14:30:07 crc kubenswrapper[4861]: I1003 14:30:07.619773 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-62mwv" event={"ID":"6f55824c-2b72-49cf-b866-c4b3668e20e2","Type":"ContainerStarted","Data":"fe0aef0493976eabe6d00fcd45b2eee0d44e979a4ef33fec165ed6038dec1aa5"} Oct 03 14:30:07 crc kubenswrapper[4861]: I1003 14:30:07.622850 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29325030-7wlmw" event={"ID":"b7864b04-4056-48ca-bfc7-cebbcb70128f","Type":"ContainerDied","Data":"d21c00e64a04d09f2d722eada767515fea1722ce08ec73d7948fdafc28025d46"} Oct 03 14:30:07 crc kubenswrapper[4861]: I1003 14:30:07.622894 4861 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d21c00e64a04d09f2d722eada767515fea1722ce08ec73d7948fdafc28025d46" Oct 03 14:30:07 crc kubenswrapper[4861]: I1003 14:30:07.622955 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29325030-7wlmw" Oct 03 14:30:07 crc kubenswrapper[4861]: I1003 14:30:07.661902 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-62mwv" podStartSLOduration=3.222130169 podStartE2EDuration="20.661882919s" podCreationTimestamp="2025-10-03 14:29:47 +0000 UTC" firstStartedPulling="2025-10-03 14:29:49.445596895 +0000 UTC m=+3503.443581942" lastFinishedPulling="2025-10-03 14:30:06.885349655 +0000 UTC m=+3520.883334692" observedRunningTime="2025-10-03 14:30:07.640547916 +0000 UTC m=+3521.638532953" watchObservedRunningTime="2025-10-03 14:30:07.661882919 +0000 UTC m=+3521.659867966" Oct 03 14:30:07 crc kubenswrapper[4861]: I1003 14:30:07.698446 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29324985-v2nnz"] Oct 03 14:30:07 crc kubenswrapper[4861]: I1003 14:30:07.706450 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29324985-v2nnz"] Oct 03 14:30:08 crc kubenswrapper[4861]: I1003 14:30:08.059195 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-62mwv" Oct 03 14:30:08 crc kubenswrapper[4861]: I1003 14:30:08.059282 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-62mwv" Oct 03 14:30:08 crc kubenswrapper[4861]: I1003 14:30:08.516356 4861 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-dpxn2" podUID="e0663576-2a3e-425d-b34e-448b12474f6a" containerName="registry-server" probeResult="failure" output=< Oct 03 14:30:08 crc kubenswrapper[4861]: timeout: failed to connect service ":50051" within 1s Oct 03 14:30:08 crc kubenswrapper[4861]: > Oct 03 14:30:08 crc kubenswrapper[4861]: I1003 14:30:08.701513 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5a77c936-63c4-4e96-aa36-7616663f398e" path="/var/lib/kubelet/pods/5a77c936-63c4-4e96-aa36-7616663f398e/volumes" Oct 03 14:30:09 crc kubenswrapper[4861]: I1003 14:30:09.115929 4861 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-62mwv" podUID="6f55824c-2b72-49cf-b866-c4b3668e20e2" containerName="registry-server" probeResult="failure" output=< Oct 03 14:30:09 crc kubenswrapper[4861]: timeout: failed to connect service ":50051" within 1s Oct 03 14:30:09 crc kubenswrapper[4861]: > Oct 03 14:30:17 crc kubenswrapper[4861]: I1003 14:30:17.532347 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-dpxn2" Oct 03 14:30:17 crc kubenswrapper[4861]: I1003 14:30:17.586585 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-dpxn2" Oct 03 14:30:17 crc kubenswrapper[4861]: I1003 14:30:17.773394 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-dpxn2"] Oct 03 14:30:18 crc kubenswrapper[4861]: I1003 14:30:18.719664 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-dpxn2" podUID="e0663576-2a3e-425d-b34e-448b12474f6a" containerName="registry-server" containerID="cri-o://166f5f2281a1ba2880f352a2b268b983b2216519abf28d25a7a0089f56b4a5b9" gracePeriod=2 Oct 03 14:30:19 crc kubenswrapper[4861]: I1003 
Oct 03 14:30:19 crc kubenswrapper[4861]: I1003 14:30:19.103053 4861 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-62mwv" podUID="6f55824c-2b72-49cf-b866-c4b3668e20e2" containerName="registry-server" probeResult="failure" output=<
Oct 03 14:30:19 crc kubenswrapper[4861]: timeout: failed to connect service ":50051" within 1s
Oct 03 14:30:19 crc kubenswrapper[4861]: >
Oct 03 14:30:19 crc kubenswrapper[4861]: I1003 14:30:19.735030 4861 generic.go:334] "Generic (PLEG): container finished" podID="e0663576-2a3e-425d-b34e-448b12474f6a" containerID="166f5f2281a1ba2880f352a2b268b983b2216519abf28d25a7a0089f56b4a5b9" exitCode=0
Oct 03 14:30:19 crc kubenswrapper[4861]: I1003 14:30:19.735270 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dpxn2" event={"ID":"e0663576-2a3e-425d-b34e-448b12474f6a","Type":"ContainerDied","Data":"166f5f2281a1ba2880f352a2b268b983b2216519abf28d25a7a0089f56b4a5b9"}
Oct 03 14:30:19 crc kubenswrapper[4861]: I1003 14:30:19.969188 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-dpxn2"
Oct 03 14:30:20 crc kubenswrapper[4861]: I1003 14:30:20.086952 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e0663576-2a3e-425d-b34e-448b12474f6a-catalog-content\") pod \"e0663576-2a3e-425d-b34e-448b12474f6a\" (UID: \"e0663576-2a3e-425d-b34e-448b12474f6a\") "
Oct 03 14:30:20 crc kubenswrapper[4861]: I1003 14:30:20.087271 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qjh5h\" (UniqueName: \"kubernetes.io/projected/e0663576-2a3e-425d-b34e-448b12474f6a-kube-api-access-qjh5h\") pod \"e0663576-2a3e-425d-b34e-448b12474f6a\" (UID: \"e0663576-2a3e-425d-b34e-448b12474f6a\") "
Oct 03 14:30:20 crc kubenswrapper[4861]: I1003 14:30:20.087495 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e0663576-2a3e-425d-b34e-448b12474f6a-utilities\") pod \"e0663576-2a3e-425d-b34e-448b12474f6a\" (UID: \"e0663576-2a3e-425d-b34e-448b12474f6a\") "
Oct 03 14:30:20 crc kubenswrapper[4861]: I1003 14:30:20.088028 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e0663576-2a3e-425d-b34e-448b12474f6a-utilities" (OuterVolumeSpecName: "utilities") pod "e0663576-2a3e-425d-b34e-448b12474f6a" (UID: "e0663576-2a3e-425d-b34e-448b12474f6a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 14:30:20 crc kubenswrapper[4861]: I1003 14:30:20.189281 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qjh5h\" (UniqueName: \"kubernetes.io/projected/e0663576-2a3e-425d-b34e-448b12474f6a-kube-api-access-qjh5h\") on node \"crc\" DevicePath \"\"" Oct 03 14:30:20 crc kubenswrapper[4861]: I1003 14:30:20.189324 4861 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e0663576-2a3e-425d-b34e-448b12474f6a-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 14:30:20 crc kubenswrapper[4861]: I1003 14:30:20.232248 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e0663576-2a3e-425d-b34e-448b12474f6a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e0663576-2a3e-425d-b34e-448b12474f6a" (UID: "e0663576-2a3e-425d-b34e-448b12474f6a"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 14:30:20 crc kubenswrapper[4861]: I1003 14:30:20.290608 4861 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e0663576-2a3e-425d-b34e-448b12474f6a-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 14:30:20 crc kubenswrapper[4861]: I1003 14:30:20.747576 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dpxn2" event={"ID":"e0663576-2a3e-425d-b34e-448b12474f6a","Type":"ContainerDied","Data":"3e1f3656c17b2dc2646c8dcb92db7ebc31b8892e387f287174583bd629410f69"} Oct 03 14:30:20 crc kubenswrapper[4861]: I1003 14:30:20.747632 4861 scope.go:117] "RemoveContainer" containerID="166f5f2281a1ba2880f352a2b268b983b2216519abf28d25a7a0089f56b4a5b9" Oct 03 14:30:20 crc kubenswrapper[4861]: I1003 14:30:20.747736 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-dpxn2" Oct 03 14:30:20 crc kubenswrapper[4861]: I1003 14:30:20.779908 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-dpxn2"] Oct 03 14:30:20 crc kubenswrapper[4861]: I1003 14:30:20.783685 4861 scope.go:117] "RemoveContainer" containerID="cb59e8a2fd7be4e5b7ff761817477902e4da93e64e82d8796c6f6c86ae4bf0c2" Oct 03 14:30:20 crc kubenswrapper[4861]: I1003 14:30:20.791351 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-dpxn2"] Oct 03 14:30:20 crc kubenswrapper[4861]: I1003 14:30:20.824825 4861 scope.go:117] "RemoveContainer" containerID="559791f73c423258a1cb310c30636574cc54877e10af81b098a0db3a27d8ccc2" Oct 03 14:30:22 crc kubenswrapper[4861]: I1003 14:30:22.693051 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e0663576-2a3e-425d-b34e-448b12474f6a" path="/var/lib/kubelet/pods/e0663576-2a3e-425d-b34e-448b12474f6a/volumes" Oct 03 14:30:28 crc kubenswrapper[4861]: I1003 14:30:28.111274 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-62mwv" Oct 03 14:30:28 crc kubenswrapper[4861]: I1003 14:30:28.166201 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-62mwv" Oct 03 14:30:29 crc kubenswrapper[4861]: I1003 14:30:29.138467 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-62mwv"] Oct 03 14:30:29 crc kubenswrapper[4861]: I1003 14:30:29.828367 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-62mwv" podUID="6f55824c-2b72-49cf-b866-c4b3668e20e2" containerName="registry-server" containerID="cri-o://fe0aef0493976eabe6d00fcd45b2eee0d44e979a4ef33fec165ed6038dec1aa5" gracePeriod=2 Oct 03 14:30:30 crc kubenswrapper[4861]: I1003 14:30:30.145020 4861 patch_prober.go:28] interesting pod/machine-config-daemon-t9slw container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 14:30:30 crc kubenswrapper[4861]: I1003 14:30:30.145447 4861 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 14:30:30 crc kubenswrapper[4861]: I1003 14:30:30.613414 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-62mwv" Oct 03 14:30:30 crc kubenswrapper[4861]: I1003 14:30:30.691913 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6f55824c-2b72-49cf-b866-c4b3668e20e2-catalog-content\") pod \"6f55824c-2b72-49cf-b866-c4b3668e20e2\" (UID: \"6f55824c-2b72-49cf-b866-c4b3668e20e2\") " Oct 03 14:30:30 crc kubenswrapper[4861]: I1003 14:30:30.691980 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m66qt\" (UniqueName: \"kubernetes.io/projected/6f55824c-2b72-49cf-b866-c4b3668e20e2-kube-api-access-m66qt\") pod \"6f55824c-2b72-49cf-b866-c4b3668e20e2\" (UID: \"6f55824c-2b72-49cf-b866-c4b3668e20e2\") " Oct 03 14:30:30 crc kubenswrapper[4861]: I1003 14:30:30.692207 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6f55824c-2b72-49cf-b866-c4b3668e20e2-utilities\") pod \"6f55824c-2b72-49cf-b866-c4b3668e20e2\" (UID: \"6f55824c-2b72-49cf-b866-c4b3668e20e2\") " Oct 03 14:30:30 crc kubenswrapper[4861]: I1003 14:30:30.694199 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6f55824c-2b72-49cf-b866-c4b3668e20e2-utilities" (OuterVolumeSpecName: "utilities") pod "6f55824c-2b72-49cf-b866-c4b3668e20e2" (UID: "6f55824c-2b72-49cf-b866-c4b3668e20e2"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 14:30:30 crc kubenswrapper[4861]: I1003 14:30:30.723189 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6f55824c-2b72-49cf-b866-c4b3668e20e2-kube-api-access-m66qt" (OuterVolumeSpecName: "kube-api-access-m66qt") pod "6f55824c-2b72-49cf-b866-c4b3668e20e2" (UID: "6f55824c-2b72-49cf-b866-c4b3668e20e2"). InnerVolumeSpecName "kube-api-access-m66qt". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 14:30:30 crc kubenswrapper[4861]: I1003 14:30:30.779725 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6f55824c-2b72-49cf-b866-c4b3668e20e2-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6f55824c-2b72-49cf-b866-c4b3668e20e2" (UID: "6f55824c-2b72-49cf-b866-c4b3668e20e2"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 14:30:30 crc kubenswrapper[4861]: I1003 14:30:30.794795 4861 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6f55824c-2b72-49cf-b866-c4b3668e20e2-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 14:30:30 crc kubenswrapper[4861]: I1003 14:30:30.794987 4861 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6f55824c-2b72-49cf-b866-c4b3668e20e2-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 14:30:30 crc kubenswrapper[4861]: I1003 14:30:30.795049 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m66qt\" (UniqueName: \"kubernetes.io/projected/6f55824c-2b72-49cf-b866-c4b3668e20e2-kube-api-access-m66qt\") on node \"crc\" DevicePath \"\"" Oct 03 14:30:30 crc kubenswrapper[4861]: I1003 14:30:30.838863 4861 generic.go:334] "Generic (PLEG): container finished" podID="6f55824c-2b72-49cf-b866-c4b3668e20e2" containerID="fe0aef0493976eabe6d00fcd45b2eee0d44e979a4ef33fec165ed6038dec1aa5" exitCode=0 Oct 03 14:30:30 crc kubenswrapper[4861]: I1003 14:30:30.838904 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-62mwv" event={"ID":"6f55824c-2b72-49cf-b866-c4b3668e20e2","Type":"ContainerDied","Data":"fe0aef0493976eabe6d00fcd45b2eee0d44e979a4ef33fec165ed6038dec1aa5"} Oct 03 14:30:30 crc kubenswrapper[4861]: I1003 14:30:30.838931 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-62mwv" event={"ID":"6f55824c-2b72-49cf-b866-c4b3668e20e2","Type":"ContainerDied","Data":"7ba0cc499b4b823d8601667ff0a50f16ec7d97457f4efafb0f4bf1e709456279"} Oct 03 14:30:30 crc kubenswrapper[4861]: I1003 14:30:30.838948 4861 scope.go:117] "RemoveContainer" containerID="fe0aef0493976eabe6d00fcd45b2eee0d44e979a4ef33fec165ed6038dec1aa5" Oct 03 14:30:30 crc kubenswrapper[4861]: I1003 14:30:30.838994 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-62mwv" Oct 03 14:30:30 crc kubenswrapper[4861]: I1003 14:30:30.880814 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-62mwv"] Oct 03 14:30:30 crc kubenswrapper[4861]: I1003 14:30:30.883890 4861 scope.go:117] "RemoveContainer" containerID="3e4a48bd996087135891dec490bd52968e3c581e409cd2e830cc44740045b9b2" Oct 03 14:30:30 crc kubenswrapper[4861]: I1003 14:30:30.889309 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-62mwv"] Oct 03 14:30:30 crc kubenswrapper[4861]: I1003 14:30:30.919420 4861 scope.go:117] "RemoveContainer" containerID="22f027ed9eb0700b811761930824ca369cbb3be24f4d87243ccaf161d902815b" Oct 03 14:30:30 crc kubenswrapper[4861]: I1003 14:30:30.952347 4861 scope.go:117] "RemoveContainer" containerID="fe0aef0493976eabe6d00fcd45b2eee0d44e979a4ef33fec165ed6038dec1aa5" Oct 03 14:30:30 crc kubenswrapper[4861]: E1003 14:30:30.953097 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fe0aef0493976eabe6d00fcd45b2eee0d44e979a4ef33fec165ed6038dec1aa5\": container with ID starting with fe0aef0493976eabe6d00fcd45b2eee0d44e979a4ef33fec165ed6038dec1aa5 not found: ID does not exist" containerID="fe0aef0493976eabe6d00fcd45b2eee0d44e979a4ef33fec165ed6038dec1aa5" Oct 03 14:30:30 crc kubenswrapper[4861]: I1003 14:30:30.953135 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fe0aef0493976eabe6d00fcd45b2eee0d44e979a4ef33fec165ed6038dec1aa5"} err="failed to get container status \"fe0aef0493976eabe6d00fcd45b2eee0d44e979a4ef33fec165ed6038dec1aa5\": rpc error: code = NotFound desc = could not find container \"fe0aef0493976eabe6d00fcd45b2eee0d44e979a4ef33fec165ed6038dec1aa5\": container with ID starting with fe0aef0493976eabe6d00fcd45b2eee0d44e979a4ef33fec165ed6038dec1aa5 not found: ID does not exist" Oct 03 14:30:30 crc kubenswrapper[4861]: I1003 14:30:30.953162 4861 scope.go:117] "RemoveContainer" containerID="3e4a48bd996087135891dec490bd52968e3c581e409cd2e830cc44740045b9b2" Oct 03 14:30:30 crc kubenswrapper[4861]: E1003 14:30:30.953456 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3e4a48bd996087135891dec490bd52968e3c581e409cd2e830cc44740045b9b2\": container with ID starting with 3e4a48bd996087135891dec490bd52968e3c581e409cd2e830cc44740045b9b2 not found: ID does not exist" containerID="3e4a48bd996087135891dec490bd52968e3c581e409cd2e830cc44740045b9b2" Oct 03 14:30:30 crc kubenswrapper[4861]: I1003 14:30:30.953482 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3e4a48bd996087135891dec490bd52968e3c581e409cd2e830cc44740045b9b2"} err="failed to get container status \"3e4a48bd996087135891dec490bd52968e3c581e409cd2e830cc44740045b9b2\": rpc error: code = NotFound desc = could not find container \"3e4a48bd996087135891dec490bd52968e3c581e409cd2e830cc44740045b9b2\": container with ID starting with 3e4a48bd996087135891dec490bd52968e3c581e409cd2e830cc44740045b9b2 not found: ID does not exist" Oct 03 14:30:30 crc kubenswrapper[4861]: I1003 14:30:30.953503 4861 scope.go:117] "RemoveContainer" containerID="22f027ed9eb0700b811761930824ca369cbb3be24f4d87243ccaf161d902815b" Oct 03 14:30:30 crc kubenswrapper[4861]: E1003 14:30:30.953819 4861 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"22f027ed9eb0700b811761930824ca369cbb3be24f4d87243ccaf161d902815b\": container with ID starting with 22f027ed9eb0700b811761930824ca369cbb3be24f4d87243ccaf161d902815b not found: ID does not exist" containerID="22f027ed9eb0700b811761930824ca369cbb3be24f4d87243ccaf161d902815b" Oct 03 14:30:30 crc kubenswrapper[4861]: I1003 14:30:30.953842 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"22f027ed9eb0700b811761930824ca369cbb3be24f4d87243ccaf161d902815b"} err="failed to get container status \"22f027ed9eb0700b811761930824ca369cbb3be24f4d87243ccaf161d902815b\": rpc error: code = NotFound desc = could not find container \"22f027ed9eb0700b811761930824ca369cbb3be24f4d87243ccaf161d902815b\": container with ID starting with 22f027ed9eb0700b811761930824ca369cbb3be24f4d87243ccaf161d902815b not found: ID does not exist" Oct 03 14:30:32 crc kubenswrapper[4861]: I1003 14:30:32.691753 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6f55824c-2b72-49cf-b866-c4b3668e20e2" path="/var/lib/kubelet/pods/6f55824c-2b72-49cf-b866-c4b3668e20e2/volumes" Oct 03 14:30:41 crc kubenswrapper[4861]: I1003 14:30:41.346009 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-md5vx"] Oct 03 14:30:41 crc kubenswrapper[4861]: E1003 14:30:41.347024 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6f55824c-2b72-49cf-b866-c4b3668e20e2" containerName="extract-utilities" Oct 03 14:30:41 crc kubenswrapper[4861]: I1003 14:30:41.347037 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="6f55824c-2b72-49cf-b866-c4b3668e20e2" containerName="extract-utilities" Oct 03 14:30:41 crc kubenswrapper[4861]: E1003 14:30:41.347046 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0663576-2a3e-425d-b34e-448b12474f6a" containerName="registry-server" Oct 03 14:30:41 crc kubenswrapper[4861]: I1003 14:30:41.347052 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0663576-2a3e-425d-b34e-448b12474f6a" containerName="registry-server" Oct 03 14:30:41 crc kubenswrapper[4861]: E1003 14:30:41.347072 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0663576-2a3e-425d-b34e-448b12474f6a" containerName="extract-utilities" Oct 03 14:30:41 crc kubenswrapper[4861]: I1003 14:30:41.347080 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0663576-2a3e-425d-b34e-448b12474f6a" containerName="extract-utilities" Oct 03 14:30:41 crc kubenswrapper[4861]: E1003 14:30:41.347094 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0663576-2a3e-425d-b34e-448b12474f6a" containerName="extract-content" Oct 03 14:30:41 crc kubenswrapper[4861]: I1003 14:30:41.347100 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0663576-2a3e-425d-b34e-448b12474f6a" containerName="extract-content" Oct 03 14:30:41 crc kubenswrapper[4861]: E1003 14:30:41.347120 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b7864b04-4056-48ca-bfc7-cebbcb70128f" containerName="collect-profiles" Oct 03 14:30:41 crc kubenswrapper[4861]: I1003 14:30:41.347126 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="b7864b04-4056-48ca-bfc7-cebbcb70128f" containerName="collect-profiles" Oct 03 14:30:41 crc kubenswrapper[4861]: E1003 14:30:41.347140 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6f55824c-2b72-49cf-b866-c4b3668e20e2" 
containerName="extract-content" Oct 03 14:30:41 crc kubenswrapper[4861]: I1003 14:30:41.347146 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="6f55824c-2b72-49cf-b866-c4b3668e20e2" containerName="extract-content" Oct 03 14:30:41 crc kubenswrapper[4861]: E1003 14:30:41.347154 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6f55824c-2b72-49cf-b866-c4b3668e20e2" containerName="registry-server" Oct 03 14:30:41 crc kubenswrapper[4861]: I1003 14:30:41.347160 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="6f55824c-2b72-49cf-b866-c4b3668e20e2" containerName="registry-server" Oct 03 14:30:41 crc kubenswrapper[4861]: I1003 14:30:41.347518 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="e0663576-2a3e-425d-b34e-448b12474f6a" containerName="registry-server" Oct 03 14:30:41 crc kubenswrapper[4861]: I1003 14:30:41.347538 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="b7864b04-4056-48ca-bfc7-cebbcb70128f" containerName="collect-profiles" Oct 03 14:30:41 crc kubenswrapper[4861]: I1003 14:30:41.347558 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="6f55824c-2b72-49cf-b866-c4b3668e20e2" containerName="registry-server" Oct 03 14:30:41 crc kubenswrapper[4861]: I1003 14:30:41.348953 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-md5vx" Oct 03 14:30:41 crc kubenswrapper[4861]: I1003 14:30:41.363962 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-md5vx"] Oct 03 14:30:41 crc kubenswrapper[4861]: I1003 14:30:41.409033 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e7777eaf-cedd-41fc-a644-16374125f6c5-catalog-content\") pod \"certified-operators-md5vx\" (UID: \"e7777eaf-cedd-41fc-a644-16374125f6c5\") " pod="openshift-marketplace/certified-operators-md5vx" Oct 03 14:30:41 crc kubenswrapper[4861]: I1003 14:30:41.409186 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2nhf8\" (UniqueName: \"kubernetes.io/projected/e7777eaf-cedd-41fc-a644-16374125f6c5-kube-api-access-2nhf8\") pod \"certified-operators-md5vx\" (UID: \"e7777eaf-cedd-41fc-a644-16374125f6c5\") " pod="openshift-marketplace/certified-operators-md5vx" Oct 03 14:30:41 crc kubenswrapper[4861]: I1003 14:30:41.409217 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e7777eaf-cedd-41fc-a644-16374125f6c5-utilities\") pod \"certified-operators-md5vx\" (UID: \"e7777eaf-cedd-41fc-a644-16374125f6c5\") " pod="openshift-marketplace/certified-operators-md5vx" Oct 03 14:30:41 crc kubenswrapper[4861]: I1003 14:30:41.510581 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2nhf8\" (UniqueName: \"kubernetes.io/projected/e7777eaf-cedd-41fc-a644-16374125f6c5-kube-api-access-2nhf8\") pod \"certified-operators-md5vx\" (UID: \"e7777eaf-cedd-41fc-a644-16374125f6c5\") " pod="openshift-marketplace/certified-operators-md5vx" Oct 03 14:30:41 crc kubenswrapper[4861]: I1003 14:30:41.510836 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e7777eaf-cedd-41fc-a644-16374125f6c5-utilities\") pod \"certified-operators-md5vx\" (UID: 
\"e7777eaf-cedd-41fc-a644-16374125f6c5\") " pod="openshift-marketplace/certified-operators-md5vx" Oct 03 14:30:41 crc kubenswrapper[4861]: I1003 14:30:41.510981 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e7777eaf-cedd-41fc-a644-16374125f6c5-catalog-content\") pod \"certified-operators-md5vx\" (UID: \"e7777eaf-cedd-41fc-a644-16374125f6c5\") " pod="openshift-marketplace/certified-operators-md5vx" Oct 03 14:30:41 crc kubenswrapper[4861]: I1003 14:30:41.511437 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e7777eaf-cedd-41fc-a644-16374125f6c5-utilities\") pod \"certified-operators-md5vx\" (UID: \"e7777eaf-cedd-41fc-a644-16374125f6c5\") " pod="openshift-marketplace/certified-operators-md5vx" Oct 03 14:30:41 crc kubenswrapper[4861]: I1003 14:30:41.511488 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e7777eaf-cedd-41fc-a644-16374125f6c5-catalog-content\") pod \"certified-operators-md5vx\" (UID: \"e7777eaf-cedd-41fc-a644-16374125f6c5\") " pod="openshift-marketplace/certified-operators-md5vx" Oct 03 14:30:41 crc kubenswrapper[4861]: I1003 14:30:41.532418 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2nhf8\" (UniqueName: \"kubernetes.io/projected/e7777eaf-cedd-41fc-a644-16374125f6c5-kube-api-access-2nhf8\") pod \"certified-operators-md5vx\" (UID: \"e7777eaf-cedd-41fc-a644-16374125f6c5\") " pod="openshift-marketplace/certified-operators-md5vx" Oct 03 14:30:41 crc kubenswrapper[4861]: I1003 14:30:41.689575 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-md5vx" Oct 03 14:30:42 crc kubenswrapper[4861]: I1003 14:30:42.302916 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-md5vx"] Oct 03 14:30:42 crc kubenswrapper[4861]: I1003 14:30:42.954391 4861 generic.go:334] "Generic (PLEG): container finished" podID="e7777eaf-cedd-41fc-a644-16374125f6c5" containerID="dd1c32e85d5288f72b30171819384c560c4382a6ae57df052535568178a7e6d8" exitCode=0 Oct 03 14:30:42 crc kubenswrapper[4861]: I1003 14:30:42.954438 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-md5vx" event={"ID":"e7777eaf-cedd-41fc-a644-16374125f6c5","Type":"ContainerDied","Data":"dd1c32e85d5288f72b30171819384c560c4382a6ae57df052535568178a7e6d8"} Oct 03 14:30:42 crc kubenswrapper[4861]: I1003 14:30:42.954843 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-md5vx" event={"ID":"e7777eaf-cedd-41fc-a644-16374125f6c5","Type":"ContainerStarted","Data":"0d600f0da99e381eb28223e3860ea6c8561b1cef43cad07d5242c1e380c6b7ad"} Oct 03 14:30:44 crc kubenswrapper[4861]: I1003 14:30:44.976133 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-md5vx" event={"ID":"e7777eaf-cedd-41fc-a644-16374125f6c5","Type":"ContainerStarted","Data":"348635d598f8ee70a41eb2441ee311059f009f91697f2caf01bfe4b1679c04e3"} Oct 03 14:30:47 crc kubenswrapper[4861]: I1003 14:30:47.000900 4861 generic.go:334] "Generic (PLEG): container finished" podID="e7777eaf-cedd-41fc-a644-16374125f6c5" containerID="348635d598f8ee70a41eb2441ee311059f009f91697f2caf01bfe4b1679c04e3" exitCode=0 Oct 03 14:30:47 crc kubenswrapper[4861]: 
Oct 03 14:30:47 crc kubenswrapper[4861]: I1003 14:30:47.000976 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-md5vx" event={"ID":"e7777eaf-cedd-41fc-a644-16374125f6c5","Type":"ContainerDied","Data":"348635d598f8ee70a41eb2441ee311059f009f91697f2caf01bfe4b1679c04e3"}
Oct 03 14:30:49 crc kubenswrapper[4861]: I1003 14:30:49.029842 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-md5vx" event={"ID":"e7777eaf-cedd-41fc-a644-16374125f6c5","Type":"ContainerStarted","Data":"3a63c7866df022535df91390ef4d537d548c3454e23e74824508721c25ee2cd7"}
Oct 03 14:30:49 crc kubenswrapper[4861]: I1003 14:30:49.055980 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-md5vx" podStartSLOduration=3.212867455 podStartE2EDuration="8.055953493s" podCreationTimestamp="2025-10-03 14:30:41 +0000 UTC" firstStartedPulling="2025-10-03 14:30:42.955975391 +0000 UTC m=+3556.953960438" lastFinishedPulling="2025-10-03 14:30:47.799061429 +0000 UTC m=+3561.797046476" observedRunningTime="2025-10-03 14:30:49.047505698 +0000 UTC m=+3563.045490765" watchObservedRunningTime="2025-10-03 14:30:49.055953493 +0000 UTC m=+3563.053938550"
Oct 03 14:30:51 crc kubenswrapper[4861]: I1003 14:30:51.690186 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-md5vx"
Oct 03 14:30:51 crc kubenswrapper[4861]: I1003 14:30:51.691644 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-md5vx"
Oct 03 14:30:51 crc kubenswrapper[4861]: I1003 14:30:51.737951 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-md5vx"
Oct 03 14:30:53 crc kubenswrapper[4861]: I1003 14:30:53.113816 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-md5vx"
Oct 03 14:30:53 crc kubenswrapper[4861]: I1003 14:30:53.166911 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-md5vx"]
Oct 03 14:30:55 crc kubenswrapper[4861]: I1003 14:30:55.077213 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-md5vx" podUID="e7777eaf-cedd-41fc-a644-16374125f6c5" containerName="registry-server" containerID="cri-o://3a63c7866df022535df91390ef4d537d548c3454e23e74824508721c25ee2cd7" gracePeriod=2
Need to start a new one" pod="openshift-marketplace/certified-operators-md5vx" Oct 03 14:30:56 crc kubenswrapper[4861]: I1003 14:30:56.000314 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2nhf8\" (UniqueName: \"kubernetes.io/projected/e7777eaf-cedd-41fc-a644-16374125f6c5-kube-api-access-2nhf8\") pod \"e7777eaf-cedd-41fc-a644-16374125f6c5\" (UID: \"e7777eaf-cedd-41fc-a644-16374125f6c5\") " Oct 03 14:30:56 crc kubenswrapper[4861]: I1003 14:30:56.000680 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e7777eaf-cedd-41fc-a644-16374125f6c5-utilities\") pod \"e7777eaf-cedd-41fc-a644-16374125f6c5\" (UID: \"e7777eaf-cedd-41fc-a644-16374125f6c5\") " Oct 03 14:30:56 crc kubenswrapper[4861]: I1003 14:30:56.000777 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e7777eaf-cedd-41fc-a644-16374125f6c5-catalog-content\") pod \"e7777eaf-cedd-41fc-a644-16374125f6c5\" (UID: \"e7777eaf-cedd-41fc-a644-16374125f6c5\") " Oct 03 14:30:56 crc kubenswrapper[4861]: I1003 14:30:56.002572 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e7777eaf-cedd-41fc-a644-16374125f6c5-utilities" (OuterVolumeSpecName: "utilities") pod "e7777eaf-cedd-41fc-a644-16374125f6c5" (UID: "e7777eaf-cedd-41fc-a644-16374125f6c5"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 14:30:56 crc kubenswrapper[4861]: I1003 14:30:56.018810 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7777eaf-cedd-41fc-a644-16374125f6c5-kube-api-access-2nhf8" (OuterVolumeSpecName: "kube-api-access-2nhf8") pod "e7777eaf-cedd-41fc-a644-16374125f6c5" (UID: "e7777eaf-cedd-41fc-a644-16374125f6c5"). InnerVolumeSpecName "kube-api-access-2nhf8". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 14:30:56 crc kubenswrapper[4861]: I1003 14:30:56.061557 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e7777eaf-cedd-41fc-a644-16374125f6c5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e7777eaf-cedd-41fc-a644-16374125f6c5" (UID: "e7777eaf-cedd-41fc-a644-16374125f6c5"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 14:30:56 crc kubenswrapper[4861]: I1003 14:30:56.087338 4861 generic.go:334] "Generic (PLEG): container finished" podID="e7777eaf-cedd-41fc-a644-16374125f6c5" containerID="3a63c7866df022535df91390ef4d537d548c3454e23e74824508721c25ee2cd7" exitCode=0 Oct 03 14:30:56 crc kubenswrapper[4861]: I1003 14:30:56.087378 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-md5vx" event={"ID":"e7777eaf-cedd-41fc-a644-16374125f6c5","Type":"ContainerDied","Data":"3a63c7866df022535df91390ef4d537d548c3454e23e74824508721c25ee2cd7"} Oct 03 14:30:56 crc kubenswrapper[4861]: I1003 14:30:56.087405 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-md5vx" event={"ID":"e7777eaf-cedd-41fc-a644-16374125f6c5","Type":"ContainerDied","Data":"0d600f0da99e381eb28223e3860ea6c8561b1cef43cad07d5242c1e380c6b7ad"} Oct 03 14:30:56 crc kubenswrapper[4861]: I1003 14:30:56.087421 4861 scope.go:117] "RemoveContainer" containerID="3a63c7866df022535df91390ef4d537d548c3454e23e74824508721c25ee2cd7" Oct 03 14:30:56 crc kubenswrapper[4861]: I1003 14:30:56.087535 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-md5vx" Oct 03 14:30:56 crc kubenswrapper[4861]: I1003 14:30:56.104975 4861 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e7777eaf-cedd-41fc-a644-16374125f6c5-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 14:30:56 crc kubenswrapper[4861]: I1003 14:30:56.105079 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2nhf8\" (UniqueName: \"kubernetes.io/projected/e7777eaf-cedd-41fc-a644-16374125f6c5-kube-api-access-2nhf8\") on node \"crc\" DevicePath \"\"" Oct 03 14:30:56 crc kubenswrapper[4861]: I1003 14:30:56.105099 4861 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e7777eaf-cedd-41fc-a644-16374125f6c5-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 14:30:56 crc kubenswrapper[4861]: I1003 14:30:56.113067 4861 scope.go:117] "RemoveContainer" containerID="348635d598f8ee70a41eb2441ee311059f009f91697f2caf01bfe4b1679c04e3" Oct 03 14:30:56 crc kubenswrapper[4861]: I1003 14:30:56.157827 4861 scope.go:117] "RemoveContainer" containerID="dd1c32e85d5288f72b30171819384c560c4382a6ae57df052535568178a7e6d8" Oct 03 14:30:56 crc kubenswrapper[4861]: I1003 14:30:56.158157 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-md5vx"] Oct 03 14:30:56 crc kubenswrapper[4861]: I1003 14:30:56.195793 4861 scope.go:117] "RemoveContainer" containerID="3a63c7866df022535df91390ef4d537d548c3454e23e74824508721c25ee2cd7" Oct 03 14:30:56 crc kubenswrapper[4861]: I1003 14:30:56.198797 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-md5vx"] Oct 03 14:30:56 crc kubenswrapper[4861]: E1003 14:30:56.201362 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3a63c7866df022535df91390ef4d537d548c3454e23e74824508721c25ee2cd7\": container with ID starting with 3a63c7866df022535df91390ef4d537d548c3454e23e74824508721c25ee2cd7 not found: ID does not exist" containerID="3a63c7866df022535df91390ef4d537d548c3454e23e74824508721c25ee2cd7" Oct 03 14:30:56 crc kubenswrapper[4861]: I1003 14:30:56.201398 
4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3a63c7866df022535df91390ef4d537d548c3454e23e74824508721c25ee2cd7"} err="failed to get container status \"3a63c7866df022535df91390ef4d537d548c3454e23e74824508721c25ee2cd7\": rpc error: code = NotFound desc = could not find container \"3a63c7866df022535df91390ef4d537d548c3454e23e74824508721c25ee2cd7\": container with ID starting with 3a63c7866df022535df91390ef4d537d548c3454e23e74824508721c25ee2cd7 not found: ID does not exist" Oct 03 14:30:56 crc kubenswrapper[4861]: I1003 14:30:56.201420 4861 scope.go:117] "RemoveContainer" containerID="348635d598f8ee70a41eb2441ee311059f009f91697f2caf01bfe4b1679c04e3" Oct 03 14:30:56 crc kubenswrapper[4861]: E1003 14:30:56.204412 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"348635d598f8ee70a41eb2441ee311059f009f91697f2caf01bfe4b1679c04e3\": container with ID starting with 348635d598f8ee70a41eb2441ee311059f009f91697f2caf01bfe4b1679c04e3 not found: ID does not exist" containerID="348635d598f8ee70a41eb2441ee311059f009f91697f2caf01bfe4b1679c04e3" Oct 03 14:30:56 crc kubenswrapper[4861]: I1003 14:30:56.204458 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"348635d598f8ee70a41eb2441ee311059f009f91697f2caf01bfe4b1679c04e3"} err="failed to get container status \"348635d598f8ee70a41eb2441ee311059f009f91697f2caf01bfe4b1679c04e3\": rpc error: code = NotFound desc = could not find container \"348635d598f8ee70a41eb2441ee311059f009f91697f2caf01bfe4b1679c04e3\": container with ID starting with 348635d598f8ee70a41eb2441ee311059f009f91697f2caf01bfe4b1679c04e3 not found: ID does not exist" Oct 03 14:30:56 crc kubenswrapper[4861]: I1003 14:30:56.204486 4861 scope.go:117] "RemoveContainer" containerID="dd1c32e85d5288f72b30171819384c560c4382a6ae57df052535568178a7e6d8" Oct 03 14:30:56 crc kubenswrapper[4861]: E1003 14:30:56.204730 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dd1c32e85d5288f72b30171819384c560c4382a6ae57df052535568178a7e6d8\": container with ID starting with dd1c32e85d5288f72b30171819384c560c4382a6ae57df052535568178a7e6d8 not found: ID does not exist" containerID="dd1c32e85d5288f72b30171819384c560c4382a6ae57df052535568178a7e6d8" Oct 03 14:30:56 crc kubenswrapper[4861]: I1003 14:30:56.204752 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dd1c32e85d5288f72b30171819384c560c4382a6ae57df052535568178a7e6d8"} err="failed to get container status \"dd1c32e85d5288f72b30171819384c560c4382a6ae57df052535568178a7e6d8\": rpc error: code = NotFound desc = could not find container \"dd1c32e85d5288f72b30171819384c560c4382a6ae57df052535568178a7e6d8\": container with ID starting with dd1c32e85d5288f72b30171819384c560c4382a6ae57df052535568178a7e6d8 not found: ID does not exist" Oct 03 14:30:56 crc kubenswrapper[4861]: I1003 14:30:56.692793 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7777eaf-cedd-41fc-a644-16374125f6c5" path="/var/lib/kubelet/pods/e7777eaf-cedd-41fc-a644-16374125f6c5/volumes" Oct 03 14:30:59 crc kubenswrapper[4861]: I1003 14:30:59.473117 4861 scope.go:117] "RemoveContainer" containerID="180e2ac430ddc953196a9766ff02af22fe2178bd1fe1a156cdcdf8fe29c5b54a" Oct 03 14:31:00 crc kubenswrapper[4861]: I1003 14:31:00.144898 4861 patch_prober.go:28] interesting 
pod/machine-config-daemon-t9slw container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 14:31:00 crc kubenswrapper[4861]: I1003 14:31:00.144946 4861 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 14:31:30 crc kubenswrapper[4861]: I1003 14:31:30.145251 4861 patch_prober.go:28] interesting pod/machine-config-daemon-t9slw container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 14:31:30 crc kubenswrapper[4861]: I1003 14:31:30.145705 4861 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 14:31:30 crc kubenswrapper[4861]: I1003 14:31:30.145742 4861 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" Oct 03 14:31:30 crc kubenswrapper[4861]: I1003 14:31:30.146450 4861 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"a6f40733b4a6b246a12e714e8856dfa517e95c3d12568e4a363e1ac4f16f07b5"} pod="openshift-machine-config-operator/machine-config-daemon-t9slw" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 03 14:31:30 crc kubenswrapper[4861]: I1003 14:31:30.146498 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" containerName="machine-config-daemon" containerID="cri-o://a6f40733b4a6b246a12e714e8856dfa517e95c3d12568e4a363e1ac4f16f07b5" gracePeriod=600 Oct 03 14:31:30 crc kubenswrapper[4861]: E1003 14:31:30.292658 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 14:31:30 crc kubenswrapper[4861]: I1003 14:31:30.398758 4861 generic.go:334] "Generic (PLEG): container finished" podID="d8335d3f-417e-4114-b306-a3d8f6c31348" containerID="a6f40733b4a6b246a12e714e8856dfa517e95c3d12568e4a363e1ac4f16f07b5" exitCode=0 Oct 03 14:31:30 crc kubenswrapper[4861]: I1003 14:31:30.398807 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" event={"ID":"d8335d3f-417e-4114-b306-a3d8f6c31348","Type":"ContainerDied","Data":"a6f40733b4a6b246a12e714e8856dfa517e95c3d12568e4a363e1ac4f16f07b5"} Oct 03 14:31:30 crc kubenswrapper[4861]: 
I1003 14:31:30.399225 4861 scope.go:117] "RemoveContainer" containerID="a705db865e2b9f8f66c5b6f71ab61d8b57fb8b7e0d0df1c91ce66057efed7c07" Oct 03 14:31:30 crc kubenswrapper[4861]: I1003 14:31:30.400306 4861 scope.go:117] "RemoveContainer" containerID="a6f40733b4a6b246a12e714e8856dfa517e95c3d12568e4a363e1ac4f16f07b5" Oct 03 14:31:30 crc kubenswrapper[4861]: E1003 14:31:30.400723 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 14:31:42 crc kubenswrapper[4861]: I1003 14:31:42.681962 4861 scope.go:117] "RemoveContainer" containerID="a6f40733b4a6b246a12e714e8856dfa517e95c3d12568e4a363e1ac4f16f07b5" Oct 03 14:31:42 crc kubenswrapper[4861]: E1003 14:31:42.682745 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 14:31:55 crc kubenswrapper[4861]: I1003 14:31:55.681654 4861 scope.go:117] "RemoveContainer" containerID="a6f40733b4a6b246a12e714e8856dfa517e95c3d12568e4a363e1ac4f16f07b5" Oct 03 14:31:55 crc kubenswrapper[4861]: E1003 14:31:55.682409 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 14:32:08 crc kubenswrapper[4861]: I1003 14:32:08.681483 4861 scope.go:117] "RemoveContainer" containerID="a6f40733b4a6b246a12e714e8856dfa517e95c3d12568e4a363e1ac4f16f07b5" Oct 03 14:32:08 crc kubenswrapper[4861]: E1003 14:32:08.682156 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 14:32:19 crc kubenswrapper[4861]: I1003 14:32:19.682366 4861 scope.go:117] "RemoveContainer" containerID="a6f40733b4a6b246a12e714e8856dfa517e95c3d12568e4a363e1ac4f16f07b5" Oct 03 14:32:19 crc kubenswrapper[4861]: E1003 14:32:19.683045 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 14:32:31 crc kubenswrapper[4861]: I1003 
14:32:31.681767 4861 scope.go:117] "RemoveContainer" containerID="a6f40733b4a6b246a12e714e8856dfa517e95c3d12568e4a363e1ac4f16f07b5" Oct 03 14:32:31 crc kubenswrapper[4861]: E1003 14:32:31.682357 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 14:32:42 crc kubenswrapper[4861]: I1003 14:32:42.684637 4861 scope.go:117] "RemoveContainer" containerID="a6f40733b4a6b246a12e714e8856dfa517e95c3d12568e4a363e1ac4f16f07b5" Oct 03 14:32:42 crc kubenswrapper[4861]: E1003 14:32:42.685455 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 14:32:53 crc kubenswrapper[4861]: I1003 14:32:53.681424 4861 scope.go:117] "RemoveContainer" containerID="a6f40733b4a6b246a12e714e8856dfa517e95c3d12568e4a363e1ac4f16f07b5" Oct 03 14:32:53 crc kubenswrapper[4861]: E1003 14:32:53.682141 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 14:33:04 crc kubenswrapper[4861]: I1003 14:33:04.681152 4861 scope.go:117] "RemoveContainer" containerID="a6f40733b4a6b246a12e714e8856dfa517e95c3d12568e4a363e1ac4f16f07b5" Oct 03 14:33:04 crc kubenswrapper[4861]: E1003 14:33:04.682175 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 14:33:19 crc kubenswrapper[4861]: I1003 14:33:19.682500 4861 scope.go:117] "RemoveContainer" containerID="a6f40733b4a6b246a12e714e8856dfa517e95c3d12568e4a363e1ac4f16f07b5" Oct 03 14:33:19 crc kubenswrapper[4861]: E1003 14:33:19.683241 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 14:33:30 crc kubenswrapper[4861]: I1003 14:33:30.680690 4861 scope.go:117] "RemoveContainer" containerID="a6f40733b4a6b246a12e714e8856dfa517e95c3d12568e4a363e1ac4f16f07b5" Oct 03 14:33:30 crc kubenswrapper[4861]: E1003 14:33:30.681311 
4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 14:33:43 crc kubenswrapper[4861]: I1003 14:33:43.682576 4861 scope.go:117] "RemoveContainer" containerID="a6f40733b4a6b246a12e714e8856dfa517e95c3d12568e4a363e1ac4f16f07b5" Oct 03 14:33:43 crc kubenswrapper[4861]: E1003 14:33:43.683404 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 14:33:55 crc kubenswrapper[4861]: I1003 14:33:55.681833 4861 scope.go:117] "RemoveContainer" containerID="a6f40733b4a6b246a12e714e8856dfa517e95c3d12568e4a363e1ac4f16f07b5" Oct 03 14:33:55 crc kubenswrapper[4861]: E1003 14:33:55.682666 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 14:34:09 crc kubenswrapper[4861]: I1003 14:34:09.682038 4861 scope.go:117] "RemoveContainer" containerID="a6f40733b4a6b246a12e714e8856dfa517e95c3d12568e4a363e1ac4f16f07b5" Oct 03 14:34:09 crc kubenswrapper[4861]: E1003 14:34:09.682770 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 14:34:24 crc kubenswrapper[4861]: I1003 14:34:24.681824 4861 scope.go:117] "RemoveContainer" containerID="a6f40733b4a6b246a12e714e8856dfa517e95c3d12568e4a363e1ac4f16f07b5" Oct 03 14:34:24 crc kubenswrapper[4861]: E1003 14:34:24.683017 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 14:34:36 crc kubenswrapper[4861]: I1003 14:34:36.700126 4861 scope.go:117] "RemoveContainer" containerID="a6f40733b4a6b246a12e714e8856dfa517e95c3d12568e4a363e1ac4f16f07b5" Oct 03 14:34:36 crc kubenswrapper[4861]: E1003 14:34:36.700942 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 14:34:51 crc kubenswrapper[4861]: I1003 14:34:51.681762 4861 scope.go:117] "RemoveContainer" containerID="a6f40733b4a6b246a12e714e8856dfa517e95c3d12568e4a363e1ac4f16f07b5" Oct 03 14:34:51 crc kubenswrapper[4861]: E1003 14:34:51.683797 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 14:35:06 crc kubenswrapper[4861]: I1003 14:35:06.691175 4861 scope.go:117] "RemoveContainer" containerID="a6f40733b4a6b246a12e714e8856dfa517e95c3d12568e4a363e1ac4f16f07b5" Oct 03 14:35:06 crc kubenswrapper[4861]: E1003 14:35:06.693035 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 14:35:17 crc kubenswrapper[4861]: I1003 14:35:17.680982 4861 scope.go:117] "RemoveContainer" containerID="a6f40733b4a6b246a12e714e8856dfa517e95c3d12568e4a363e1ac4f16f07b5" Oct 03 14:35:17 crc kubenswrapper[4861]: E1003 14:35:17.682306 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 14:35:31 crc kubenswrapper[4861]: I1003 14:35:31.681628 4861 scope.go:117] "RemoveContainer" containerID="a6f40733b4a6b246a12e714e8856dfa517e95c3d12568e4a363e1ac4f16f07b5" Oct 03 14:35:31 crc kubenswrapper[4861]: E1003 14:35:31.682510 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 14:35:42 crc kubenswrapper[4861]: I1003 14:35:42.682026 4861 scope.go:117] "RemoveContainer" containerID="a6f40733b4a6b246a12e714e8856dfa517e95c3d12568e4a363e1ac4f16f07b5" Oct 03 14:35:42 crc kubenswrapper[4861]: E1003 14:35:42.682960 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 14:35:53 crc kubenswrapper[4861]: I1003 14:35:53.691404 4861 scope.go:117] "RemoveContainer" containerID="a6f40733b4a6b246a12e714e8856dfa517e95c3d12568e4a363e1ac4f16f07b5" Oct 03 14:35:53 crc kubenswrapper[4861]: E1003 14:35:53.692610 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 14:36:04 crc kubenswrapper[4861]: I1003 14:36:04.680852 4861 scope.go:117] "RemoveContainer" containerID="a6f40733b4a6b246a12e714e8856dfa517e95c3d12568e4a363e1ac4f16f07b5" Oct 03 14:36:04 crc kubenswrapper[4861]: E1003 14:36:04.681702 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 14:36:19 crc kubenswrapper[4861]: I1003 14:36:19.682021 4861 scope.go:117] "RemoveContainer" containerID="a6f40733b4a6b246a12e714e8856dfa517e95c3d12568e4a363e1ac4f16f07b5" Oct 03 14:36:19 crc kubenswrapper[4861]: E1003 14:36:19.682713 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 14:36:30 crc kubenswrapper[4861]: I1003 14:36:30.680879 4861 scope.go:117] "RemoveContainer" containerID="a6f40733b4a6b246a12e714e8856dfa517e95c3d12568e4a363e1ac4f16f07b5" Oct 03 14:36:30 crc kubenswrapper[4861]: I1003 14:36:30.993882 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" event={"ID":"d8335d3f-417e-4114-b306-a3d8f6c31348","Type":"ContainerStarted","Data":"a3b0b75cb1e4914beee2778fba1a3910a480a03a5aecf09b3439a87cfc51f8c0"} Oct 03 14:38:30 crc kubenswrapper[4861]: I1003 14:38:30.145473 4861 patch_prober.go:28] interesting pod/machine-config-daemon-t9slw container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 14:38:30 crc kubenswrapper[4861]: I1003 14:38:30.146117 4861 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 14:39:00 crc kubenswrapper[4861]: I1003 14:39:00.145895 4861 patch_prober.go:28] interesting pod/machine-config-daemon-t9slw 
container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 14:39:00 crc kubenswrapper[4861]: I1003 14:39:00.148545 4861 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 14:39:30 crc kubenswrapper[4861]: I1003 14:39:30.145206 4861 patch_prober.go:28] interesting pod/machine-config-daemon-t9slw container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 14:39:30 crc kubenswrapper[4861]: I1003 14:39:30.145819 4861 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 14:39:30 crc kubenswrapper[4861]: I1003 14:39:30.145872 4861 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" Oct 03 14:39:30 crc kubenswrapper[4861]: I1003 14:39:30.146690 4861 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"a3b0b75cb1e4914beee2778fba1a3910a480a03a5aecf09b3439a87cfc51f8c0"} pod="openshift-machine-config-operator/machine-config-daemon-t9slw" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 03 14:39:30 crc kubenswrapper[4861]: I1003 14:39:30.146760 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" containerName="machine-config-daemon" containerID="cri-o://a3b0b75cb1e4914beee2778fba1a3910a480a03a5aecf09b3439a87cfc51f8c0" gracePeriod=600 Oct 03 14:39:30 crc kubenswrapper[4861]: I1003 14:39:30.618542 4861 generic.go:334] "Generic (PLEG): container finished" podID="d8335d3f-417e-4114-b306-a3d8f6c31348" containerID="a3b0b75cb1e4914beee2778fba1a3910a480a03a5aecf09b3439a87cfc51f8c0" exitCode=0 Oct 03 14:39:30 crc kubenswrapper[4861]: I1003 14:39:30.618608 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" event={"ID":"d8335d3f-417e-4114-b306-a3d8f6c31348","Type":"ContainerDied","Data":"a3b0b75cb1e4914beee2778fba1a3910a480a03a5aecf09b3439a87cfc51f8c0"} Oct 03 14:39:30 crc kubenswrapper[4861]: I1003 14:39:30.618920 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" event={"ID":"d8335d3f-417e-4114-b306-a3d8f6c31348","Type":"ContainerStarted","Data":"9cc426a24e7af0ab4a5086c70c2d2103d9a0bf02f0906c97e1bde93e1a0d2c12"} Oct 03 14:39:30 crc kubenswrapper[4861]: I1003 14:39:30.618949 4861 scope.go:117] "RemoveContainer" containerID="a6f40733b4a6b246a12e714e8856dfa517e95c3d12568e4a363e1ac4f16f07b5" Oct 03 14:39:50 crc kubenswrapper[4861]: I1003 
14:39:50.870549 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-j6g5l"] Oct 03 14:39:50 crc kubenswrapper[4861]: E1003 14:39:50.871820 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e7777eaf-cedd-41fc-a644-16374125f6c5" containerName="extract-content" Oct 03 14:39:50 crc kubenswrapper[4861]: I1003 14:39:50.871837 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="e7777eaf-cedd-41fc-a644-16374125f6c5" containerName="extract-content" Oct 03 14:39:50 crc kubenswrapper[4861]: E1003 14:39:50.871855 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e7777eaf-cedd-41fc-a644-16374125f6c5" containerName="registry-server" Oct 03 14:39:50 crc kubenswrapper[4861]: I1003 14:39:50.871863 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="e7777eaf-cedd-41fc-a644-16374125f6c5" containerName="registry-server" Oct 03 14:39:50 crc kubenswrapper[4861]: E1003 14:39:50.871879 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e7777eaf-cedd-41fc-a644-16374125f6c5" containerName="extract-utilities" Oct 03 14:39:50 crc kubenswrapper[4861]: I1003 14:39:50.871888 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="e7777eaf-cedd-41fc-a644-16374125f6c5" containerName="extract-utilities" Oct 03 14:39:50 crc kubenswrapper[4861]: I1003 14:39:50.872166 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="e7777eaf-cedd-41fc-a644-16374125f6c5" containerName="registry-server" Oct 03 14:39:50 crc kubenswrapper[4861]: I1003 14:39:50.876280 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-j6g5l" Oct 03 14:39:50 crc kubenswrapper[4861]: I1003 14:39:50.891132 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-j6g5l"] Oct 03 14:39:50 crc kubenswrapper[4861]: I1003 14:39:50.931330 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a286eaf4-0929-4027-a5e1-4ed1a363567e-catalog-content\") pod \"community-operators-j6g5l\" (UID: \"a286eaf4-0929-4027-a5e1-4ed1a363567e\") " pod="openshift-marketplace/community-operators-j6g5l" Oct 03 14:39:50 crc kubenswrapper[4861]: I1003 14:39:50.931740 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-97wrl\" (UniqueName: \"kubernetes.io/projected/a286eaf4-0929-4027-a5e1-4ed1a363567e-kube-api-access-97wrl\") pod \"community-operators-j6g5l\" (UID: \"a286eaf4-0929-4027-a5e1-4ed1a363567e\") " pod="openshift-marketplace/community-operators-j6g5l" Oct 03 14:39:50 crc kubenswrapper[4861]: I1003 14:39:50.931854 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a286eaf4-0929-4027-a5e1-4ed1a363567e-utilities\") pod \"community-operators-j6g5l\" (UID: \"a286eaf4-0929-4027-a5e1-4ed1a363567e\") " pod="openshift-marketplace/community-operators-j6g5l" Oct 03 14:39:51 crc kubenswrapper[4861]: I1003 14:39:51.033465 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a286eaf4-0929-4027-a5e1-4ed1a363567e-catalog-content\") pod \"community-operators-j6g5l\" (UID: \"a286eaf4-0929-4027-a5e1-4ed1a363567e\") " pod="openshift-marketplace/community-operators-j6g5l" Oct 03 14:39:51 crc 
kubenswrapper[4861]: I1003 14:39:51.033807 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-97wrl\" (UniqueName: \"kubernetes.io/projected/a286eaf4-0929-4027-a5e1-4ed1a363567e-kube-api-access-97wrl\") pod \"community-operators-j6g5l\" (UID: \"a286eaf4-0929-4027-a5e1-4ed1a363567e\") " pod="openshift-marketplace/community-operators-j6g5l" Oct 03 14:39:51 crc kubenswrapper[4861]: I1003 14:39:51.033903 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a286eaf4-0929-4027-a5e1-4ed1a363567e-utilities\") pod \"community-operators-j6g5l\" (UID: \"a286eaf4-0929-4027-a5e1-4ed1a363567e\") " pod="openshift-marketplace/community-operators-j6g5l" Oct 03 14:39:51 crc kubenswrapper[4861]: I1003 14:39:51.033963 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a286eaf4-0929-4027-a5e1-4ed1a363567e-catalog-content\") pod \"community-operators-j6g5l\" (UID: \"a286eaf4-0929-4027-a5e1-4ed1a363567e\") " pod="openshift-marketplace/community-operators-j6g5l" Oct 03 14:39:51 crc kubenswrapper[4861]: I1003 14:39:51.034515 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a286eaf4-0929-4027-a5e1-4ed1a363567e-utilities\") pod \"community-operators-j6g5l\" (UID: \"a286eaf4-0929-4027-a5e1-4ed1a363567e\") " pod="openshift-marketplace/community-operators-j6g5l" Oct 03 14:39:51 crc kubenswrapper[4861]: I1003 14:39:51.096198 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-97wrl\" (UniqueName: \"kubernetes.io/projected/a286eaf4-0929-4027-a5e1-4ed1a363567e-kube-api-access-97wrl\") pod \"community-operators-j6g5l\" (UID: \"a286eaf4-0929-4027-a5e1-4ed1a363567e\") " pod="openshift-marketplace/community-operators-j6g5l" Oct 03 14:39:51 crc kubenswrapper[4861]: I1003 14:39:51.198689 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-j6g5l" Oct 03 14:39:51 crc kubenswrapper[4861]: I1003 14:39:51.836696 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-j6g5l"] Oct 03 14:39:52 crc kubenswrapper[4861]: I1003 14:39:52.827224 4861 generic.go:334] "Generic (PLEG): container finished" podID="a286eaf4-0929-4027-a5e1-4ed1a363567e" containerID="bc569356be8a0846ff3a64dce2ba15cbbc2a51987494db418511c60c706e5167" exitCode=0 Oct 03 14:39:52 crc kubenswrapper[4861]: I1003 14:39:52.827351 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-j6g5l" event={"ID":"a286eaf4-0929-4027-a5e1-4ed1a363567e","Type":"ContainerDied","Data":"bc569356be8a0846ff3a64dce2ba15cbbc2a51987494db418511c60c706e5167"} Oct 03 14:39:52 crc kubenswrapper[4861]: I1003 14:39:52.827514 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-j6g5l" event={"ID":"a286eaf4-0929-4027-a5e1-4ed1a363567e","Type":"ContainerStarted","Data":"c8a39512bdf1cc70b6a1a8d5916e8ebf2a86be0ff824ba9434afb776997924af"} Oct 03 14:39:52 crc kubenswrapper[4861]: I1003 14:39:52.829301 4861 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 03 14:39:54 crc kubenswrapper[4861]: I1003 14:39:54.856507 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-j6g5l" event={"ID":"a286eaf4-0929-4027-a5e1-4ed1a363567e","Type":"ContainerStarted","Data":"19043428d6b064bf619943859ca6ccb16b03e9c1981ed1aa2084516ccb0d5cc2"} Oct 03 14:39:55 crc kubenswrapper[4861]: I1003 14:39:55.234475 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-56447"] Oct 03 14:39:55 crc kubenswrapper[4861]: I1003 14:39:55.287427 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-56447"] Oct 03 14:39:55 crc kubenswrapper[4861]: I1003 14:39:55.287624 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-56447" Oct 03 14:39:55 crc kubenswrapper[4861]: I1003 14:39:55.319923 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8d97e442-df88-4744-841e-ee30d8786a2b-utilities\") pod \"redhat-marketplace-56447\" (UID: \"8d97e442-df88-4744-841e-ee30d8786a2b\") " pod="openshift-marketplace/redhat-marketplace-56447" Oct 03 14:39:55 crc kubenswrapper[4861]: I1003 14:39:55.319991 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-94xs7\" (UniqueName: \"kubernetes.io/projected/8d97e442-df88-4744-841e-ee30d8786a2b-kube-api-access-94xs7\") pod \"redhat-marketplace-56447\" (UID: \"8d97e442-df88-4744-841e-ee30d8786a2b\") " pod="openshift-marketplace/redhat-marketplace-56447" Oct 03 14:39:55 crc kubenswrapper[4861]: I1003 14:39:55.320028 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8d97e442-df88-4744-841e-ee30d8786a2b-catalog-content\") pod \"redhat-marketplace-56447\" (UID: \"8d97e442-df88-4744-841e-ee30d8786a2b\") " pod="openshift-marketplace/redhat-marketplace-56447" Oct 03 14:39:55 crc kubenswrapper[4861]: I1003 14:39:55.421486 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8d97e442-df88-4744-841e-ee30d8786a2b-utilities\") pod \"redhat-marketplace-56447\" (UID: \"8d97e442-df88-4744-841e-ee30d8786a2b\") " pod="openshift-marketplace/redhat-marketplace-56447" Oct 03 14:39:55 crc kubenswrapper[4861]: I1003 14:39:55.421561 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-94xs7\" (UniqueName: \"kubernetes.io/projected/8d97e442-df88-4744-841e-ee30d8786a2b-kube-api-access-94xs7\") pod \"redhat-marketplace-56447\" (UID: \"8d97e442-df88-4744-841e-ee30d8786a2b\") " pod="openshift-marketplace/redhat-marketplace-56447" Oct 03 14:39:55 crc kubenswrapper[4861]: I1003 14:39:55.421600 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8d97e442-df88-4744-841e-ee30d8786a2b-catalog-content\") pod \"redhat-marketplace-56447\" (UID: \"8d97e442-df88-4744-841e-ee30d8786a2b\") " pod="openshift-marketplace/redhat-marketplace-56447" Oct 03 14:39:55 crc kubenswrapper[4861]: I1003 14:39:55.422470 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8d97e442-df88-4744-841e-ee30d8786a2b-catalog-content\") pod \"redhat-marketplace-56447\" (UID: \"8d97e442-df88-4744-841e-ee30d8786a2b\") " pod="openshift-marketplace/redhat-marketplace-56447" Oct 03 14:39:55 crc kubenswrapper[4861]: I1003 14:39:55.422598 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8d97e442-df88-4744-841e-ee30d8786a2b-utilities\") pod \"redhat-marketplace-56447\" (UID: \"8d97e442-df88-4744-841e-ee30d8786a2b\") " pod="openshift-marketplace/redhat-marketplace-56447" Oct 03 14:39:55 crc kubenswrapper[4861]: I1003 14:39:55.455367 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-94xs7\" (UniqueName: \"kubernetes.io/projected/8d97e442-df88-4744-841e-ee30d8786a2b-kube-api-access-94xs7\") pod 
\"redhat-marketplace-56447\" (UID: \"8d97e442-df88-4744-841e-ee30d8786a2b\") " pod="openshift-marketplace/redhat-marketplace-56447" Oct 03 14:39:55 crc kubenswrapper[4861]: I1003 14:39:55.618009 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-bllq9"] Oct 03 14:39:55 crc kubenswrapper[4861]: I1003 14:39:55.620899 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-bllq9" Oct 03 14:39:55 crc kubenswrapper[4861]: I1003 14:39:55.622174 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-56447" Oct 03 14:39:55 crc kubenswrapper[4861]: I1003 14:39:55.627644 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-bllq9"] Oct 03 14:39:55 crc kubenswrapper[4861]: I1003 14:39:55.727061 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d4732fde-1728-4190-9572-1601b8cb0db4-utilities\") pod \"redhat-operators-bllq9\" (UID: \"d4732fde-1728-4190-9572-1601b8cb0db4\") " pod="openshift-marketplace/redhat-operators-bllq9" Oct 03 14:39:55 crc kubenswrapper[4861]: I1003 14:39:55.727178 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d4732fde-1728-4190-9572-1601b8cb0db4-catalog-content\") pod \"redhat-operators-bllq9\" (UID: \"d4732fde-1728-4190-9572-1601b8cb0db4\") " pod="openshift-marketplace/redhat-operators-bllq9" Oct 03 14:39:55 crc kubenswrapper[4861]: I1003 14:39:55.727273 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-htt6n\" (UniqueName: \"kubernetes.io/projected/d4732fde-1728-4190-9572-1601b8cb0db4-kube-api-access-htt6n\") pod \"redhat-operators-bllq9\" (UID: \"d4732fde-1728-4190-9572-1601b8cb0db4\") " pod="openshift-marketplace/redhat-operators-bllq9" Oct 03 14:39:55 crc kubenswrapper[4861]: I1003 14:39:55.829194 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d4732fde-1728-4190-9572-1601b8cb0db4-utilities\") pod \"redhat-operators-bllq9\" (UID: \"d4732fde-1728-4190-9572-1601b8cb0db4\") " pod="openshift-marketplace/redhat-operators-bllq9" Oct 03 14:39:55 crc kubenswrapper[4861]: I1003 14:39:55.829277 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d4732fde-1728-4190-9572-1601b8cb0db4-catalog-content\") pod \"redhat-operators-bllq9\" (UID: \"d4732fde-1728-4190-9572-1601b8cb0db4\") " pod="openshift-marketplace/redhat-operators-bllq9" Oct 03 14:39:55 crc kubenswrapper[4861]: I1003 14:39:55.829322 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-htt6n\" (UniqueName: \"kubernetes.io/projected/d4732fde-1728-4190-9572-1601b8cb0db4-kube-api-access-htt6n\") pod \"redhat-operators-bllq9\" (UID: \"d4732fde-1728-4190-9572-1601b8cb0db4\") " pod="openshift-marketplace/redhat-operators-bllq9" Oct 03 14:39:55 crc kubenswrapper[4861]: I1003 14:39:55.829973 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d4732fde-1728-4190-9572-1601b8cb0db4-utilities\") pod \"redhat-operators-bllq9\" (UID: 
\"d4732fde-1728-4190-9572-1601b8cb0db4\") " pod="openshift-marketplace/redhat-operators-bllq9" Oct 03 14:39:55 crc kubenswrapper[4861]: I1003 14:39:55.830170 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d4732fde-1728-4190-9572-1601b8cb0db4-catalog-content\") pod \"redhat-operators-bllq9\" (UID: \"d4732fde-1728-4190-9572-1601b8cb0db4\") " pod="openshift-marketplace/redhat-operators-bllq9" Oct 03 14:39:55 crc kubenswrapper[4861]: I1003 14:39:55.883189 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-htt6n\" (UniqueName: \"kubernetes.io/projected/d4732fde-1728-4190-9572-1601b8cb0db4-kube-api-access-htt6n\") pod \"redhat-operators-bllq9\" (UID: \"d4732fde-1728-4190-9572-1601b8cb0db4\") " pod="openshift-marketplace/redhat-operators-bllq9" Oct 03 14:39:55 crc kubenswrapper[4861]: I1003 14:39:55.891639 4861 generic.go:334] "Generic (PLEG): container finished" podID="a286eaf4-0929-4027-a5e1-4ed1a363567e" containerID="19043428d6b064bf619943859ca6ccb16b03e9c1981ed1aa2084516ccb0d5cc2" exitCode=0 Oct 03 14:39:55 crc kubenswrapper[4861]: I1003 14:39:55.892165 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-j6g5l" event={"ID":"a286eaf4-0929-4027-a5e1-4ed1a363567e","Type":"ContainerDied","Data":"19043428d6b064bf619943859ca6ccb16b03e9c1981ed1aa2084516ccb0d5cc2"} Oct 03 14:39:56 crc kubenswrapper[4861]: I1003 14:39:56.060701 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-bllq9" Oct 03 14:39:56 crc kubenswrapper[4861]: I1003 14:39:56.177533 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-56447"] Oct 03 14:39:56 crc kubenswrapper[4861]: I1003 14:39:56.563639 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-bllq9"] Oct 03 14:39:56 crc kubenswrapper[4861]: W1003 14:39:56.578255 4861 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd4732fde_1728_4190_9572_1601b8cb0db4.slice/crio-0329637347da10a4a4b70bf5842fc92d114a811a82345fb2160566fee95a9d4b WatchSource:0}: Error finding container 0329637347da10a4a4b70bf5842fc92d114a811a82345fb2160566fee95a9d4b: Status 404 returned error can't find the container with id 0329637347da10a4a4b70bf5842fc92d114a811a82345fb2160566fee95a9d4b Oct 03 14:39:56 crc kubenswrapper[4861]: I1003 14:39:56.911939 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bllq9" event={"ID":"d4732fde-1728-4190-9572-1601b8cb0db4","Type":"ContainerStarted","Data":"738004f5fe979af05c0431df6f1c16b6902662d0b67ad79be1017ac3eb21b354"} Oct 03 14:39:56 crc kubenswrapper[4861]: I1003 14:39:56.912303 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bllq9" event={"ID":"d4732fde-1728-4190-9572-1601b8cb0db4","Type":"ContainerStarted","Data":"0329637347da10a4a4b70bf5842fc92d114a811a82345fb2160566fee95a9d4b"} Oct 03 14:39:56 crc kubenswrapper[4861]: I1003 14:39:56.919768 4861 generic.go:334] "Generic (PLEG): container finished" podID="8d97e442-df88-4744-841e-ee30d8786a2b" containerID="f2f97886cc4747c053579409aeeef2fe330420119bbe3c2daa323d25660a7be6" exitCode=0 Oct 03 14:39:56 crc kubenswrapper[4861]: I1003 14:39:56.919844 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/redhat-marketplace-56447" event={"ID":"8d97e442-df88-4744-841e-ee30d8786a2b","Type":"ContainerDied","Data":"f2f97886cc4747c053579409aeeef2fe330420119bbe3c2daa323d25660a7be6"} Oct 03 14:39:56 crc kubenswrapper[4861]: I1003 14:39:56.919873 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-56447" event={"ID":"8d97e442-df88-4744-841e-ee30d8786a2b","Type":"ContainerStarted","Data":"544193b9273312bd049c6f24e7d37ef10912f57012b786ce7866c2640492d1b9"} Oct 03 14:39:56 crc kubenswrapper[4861]: I1003 14:39:56.932212 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-j6g5l" event={"ID":"a286eaf4-0929-4027-a5e1-4ed1a363567e","Type":"ContainerStarted","Data":"172f82c33dc9883956f9ee3f9faf7102b185624121585d33742d318c1ec6edc3"} Oct 03 14:39:56 crc kubenswrapper[4861]: I1003 14:39:56.980014 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-j6g5l" podStartSLOduration=3.376709647 podStartE2EDuration="6.979991651s" podCreationTimestamp="2025-10-03 14:39:50 +0000 UTC" firstStartedPulling="2025-10-03 14:39:52.829073214 +0000 UTC m=+4106.827058261" lastFinishedPulling="2025-10-03 14:39:56.432355218 +0000 UTC m=+4110.430340265" observedRunningTime="2025-10-03 14:39:56.977733672 +0000 UTC m=+4110.975718719" watchObservedRunningTime="2025-10-03 14:39:56.979991651 +0000 UTC m=+4110.977976698" Oct 03 14:39:57 crc kubenswrapper[4861]: I1003 14:39:57.946999 4861 generic.go:334] "Generic (PLEG): container finished" podID="d4732fde-1728-4190-9572-1601b8cb0db4" containerID="738004f5fe979af05c0431df6f1c16b6902662d0b67ad79be1017ac3eb21b354" exitCode=0 Oct 03 14:39:57 crc kubenswrapper[4861]: I1003 14:39:57.947179 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bllq9" event={"ID":"d4732fde-1728-4190-9572-1601b8cb0db4","Type":"ContainerDied","Data":"738004f5fe979af05c0431df6f1c16b6902662d0b67ad79be1017ac3eb21b354"} Oct 03 14:39:58 crc kubenswrapper[4861]: I1003 14:39:58.969400 4861 generic.go:334] "Generic (PLEG): container finished" podID="8d97e442-df88-4744-841e-ee30d8786a2b" containerID="8fdaf01d2566909bdc3467e207ff93be7e11aedeec7f64d2e6855e8df29dad7d" exitCode=0 Oct 03 14:39:58 crc kubenswrapper[4861]: I1003 14:39:58.969457 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-56447" event={"ID":"8d97e442-df88-4744-841e-ee30d8786a2b","Type":"ContainerDied","Data":"8fdaf01d2566909bdc3467e207ff93be7e11aedeec7f64d2e6855e8df29dad7d"} Oct 03 14:40:00 crc kubenswrapper[4861]: I1003 14:40:00.992846 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bllq9" event={"ID":"d4732fde-1728-4190-9572-1601b8cb0db4","Type":"ContainerStarted","Data":"d1d06496da31a1a3884ec7d0e3583ccec11ef7bda995c4d58431e0114cbe3bbf"} Oct 03 14:40:01 crc kubenswrapper[4861]: I1003 14:40:01.199687 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-j6g5l" Oct 03 14:40:01 crc kubenswrapper[4861]: I1003 14:40:01.200000 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-j6g5l" Oct 03 14:40:01 crc kubenswrapper[4861]: I1003 14:40:01.246051 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-j6g5l" Oct 03 14:40:02 crc 
kubenswrapper[4861]: I1003 14:40:02.005971 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-56447" event={"ID":"8d97e442-df88-4744-841e-ee30d8786a2b","Type":"ContainerStarted","Data":"8710317aa5d4fde2c50e687b1e62072eb6927d236211b1fda760e4afde0d1111"} Oct 03 14:40:02 crc kubenswrapper[4861]: I1003 14:40:02.032303 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-56447" podStartSLOduration=3.124116452 podStartE2EDuration="7.03227696s" podCreationTimestamp="2025-10-03 14:39:55 +0000 UTC" firstStartedPulling="2025-10-03 14:39:56.927888352 +0000 UTC m=+4110.925873399" lastFinishedPulling="2025-10-03 14:40:00.83604886 +0000 UTC m=+4114.834033907" observedRunningTime="2025-10-03 14:40:02.023403495 +0000 UTC m=+4116.021388552" watchObservedRunningTime="2025-10-03 14:40:02.03227696 +0000 UTC m=+4116.030262027" Oct 03 14:40:02 crc kubenswrapper[4861]: I1003 14:40:02.077279 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-j6g5l" Oct 03 14:40:02 crc kubenswrapper[4861]: I1003 14:40:02.815306 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-j6g5l"] Oct 03 14:40:04 crc kubenswrapper[4861]: I1003 14:40:04.023446 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-j6g5l" podUID="a286eaf4-0929-4027-a5e1-4ed1a363567e" containerName="registry-server" containerID="cri-o://172f82c33dc9883956f9ee3f9faf7102b185624121585d33742d318c1ec6edc3" gracePeriod=2 Oct 03 14:40:04 crc kubenswrapper[4861]: I1003 14:40:04.585092 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-j6g5l" Oct 03 14:40:04 crc kubenswrapper[4861]: I1003 14:40:04.718121 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a286eaf4-0929-4027-a5e1-4ed1a363567e-utilities\") pod \"a286eaf4-0929-4027-a5e1-4ed1a363567e\" (UID: \"a286eaf4-0929-4027-a5e1-4ed1a363567e\") " Oct 03 14:40:04 crc kubenswrapper[4861]: I1003 14:40:04.718186 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a286eaf4-0929-4027-a5e1-4ed1a363567e-catalog-content\") pod \"a286eaf4-0929-4027-a5e1-4ed1a363567e\" (UID: \"a286eaf4-0929-4027-a5e1-4ed1a363567e\") " Oct 03 14:40:04 crc kubenswrapper[4861]: I1003 14:40:04.718218 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-97wrl\" (UniqueName: \"kubernetes.io/projected/a286eaf4-0929-4027-a5e1-4ed1a363567e-kube-api-access-97wrl\") pod \"a286eaf4-0929-4027-a5e1-4ed1a363567e\" (UID: \"a286eaf4-0929-4027-a5e1-4ed1a363567e\") " Oct 03 14:40:04 crc kubenswrapper[4861]: I1003 14:40:04.719091 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a286eaf4-0929-4027-a5e1-4ed1a363567e-utilities" (OuterVolumeSpecName: "utilities") pod "a286eaf4-0929-4027-a5e1-4ed1a363567e" (UID: "a286eaf4-0929-4027-a5e1-4ed1a363567e"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 14:40:04 crc kubenswrapper[4861]: I1003 14:40:04.732506 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a286eaf4-0929-4027-a5e1-4ed1a363567e-kube-api-access-97wrl" (OuterVolumeSpecName: "kube-api-access-97wrl") pod "a286eaf4-0929-4027-a5e1-4ed1a363567e" (UID: "a286eaf4-0929-4027-a5e1-4ed1a363567e"). InnerVolumeSpecName "kube-api-access-97wrl". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 14:40:04 crc kubenswrapper[4861]: I1003 14:40:04.767668 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a286eaf4-0929-4027-a5e1-4ed1a363567e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a286eaf4-0929-4027-a5e1-4ed1a363567e" (UID: "a286eaf4-0929-4027-a5e1-4ed1a363567e"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 14:40:04 crc kubenswrapper[4861]: I1003 14:40:04.820508 4861 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a286eaf4-0929-4027-a5e1-4ed1a363567e-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 14:40:04 crc kubenswrapper[4861]: I1003 14:40:04.820812 4861 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a286eaf4-0929-4027-a5e1-4ed1a363567e-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 14:40:04 crc kubenswrapper[4861]: I1003 14:40:04.820916 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-97wrl\" (UniqueName: \"kubernetes.io/projected/a286eaf4-0929-4027-a5e1-4ed1a363567e-kube-api-access-97wrl\") on node \"crc\" DevicePath \"\"" Oct 03 14:40:05 crc kubenswrapper[4861]: I1003 14:40:05.033397 4861 generic.go:334] "Generic (PLEG): container finished" podID="a286eaf4-0929-4027-a5e1-4ed1a363567e" containerID="172f82c33dc9883956f9ee3f9faf7102b185624121585d33742d318c1ec6edc3" exitCode=0 Oct 03 14:40:05 crc kubenswrapper[4861]: I1003 14:40:05.033456 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-j6g5l" Oct 03 14:40:05 crc kubenswrapper[4861]: I1003 14:40:05.033469 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-j6g5l" event={"ID":"a286eaf4-0929-4027-a5e1-4ed1a363567e","Type":"ContainerDied","Data":"172f82c33dc9883956f9ee3f9faf7102b185624121585d33742d318c1ec6edc3"} Oct 03 14:40:05 crc kubenswrapper[4861]: I1003 14:40:05.034619 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-j6g5l" event={"ID":"a286eaf4-0929-4027-a5e1-4ed1a363567e","Type":"ContainerDied","Data":"c8a39512bdf1cc70b6a1a8d5916e8ebf2a86be0ff824ba9434afb776997924af"} Oct 03 14:40:05 crc kubenswrapper[4861]: I1003 14:40:05.034661 4861 scope.go:117] "RemoveContainer" containerID="172f82c33dc9883956f9ee3f9faf7102b185624121585d33742d318c1ec6edc3" Oct 03 14:40:05 crc kubenswrapper[4861]: I1003 14:40:05.075011 4861 scope.go:117] "RemoveContainer" containerID="19043428d6b064bf619943859ca6ccb16b03e9c1981ed1aa2084516ccb0d5cc2" Oct 03 14:40:05 crc kubenswrapper[4861]: I1003 14:40:05.077280 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-j6g5l"] Oct 03 14:40:05 crc kubenswrapper[4861]: I1003 14:40:05.085852 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-j6g5l"] Oct 03 14:40:05 crc kubenswrapper[4861]: I1003 14:40:05.098056 4861 scope.go:117] "RemoveContainer" containerID="bc569356be8a0846ff3a64dce2ba15cbbc2a51987494db418511c60c706e5167" Oct 03 14:40:05 crc kubenswrapper[4861]: I1003 14:40:05.146340 4861 scope.go:117] "RemoveContainer" containerID="172f82c33dc9883956f9ee3f9faf7102b185624121585d33742d318c1ec6edc3" Oct 03 14:40:05 crc kubenswrapper[4861]: E1003 14:40:05.146807 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"172f82c33dc9883956f9ee3f9faf7102b185624121585d33742d318c1ec6edc3\": container with ID starting with 172f82c33dc9883956f9ee3f9faf7102b185624121585d33742d318c1ec6edc3 not found: ID does not exist" containerID="172f82c33dc9883956f9ee3f9faf7102b185624121585d33742d318c1ec6edc3" Oct 03 14:40:05 crc kubenswrapper[4861]: I1003 14:40:05.146833 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"172f82c33dc9883956f9ee3f9faf7102b185624121585d33742d318c1ec6edc3"} err="failed to get container status \"172f82c33dc9883956f9ee3f9faf7102b185624121585d33742d318c1ec6edc3\": rpc error: code = NotFound desc = could not find container \"172f82c33dc9883956f9ee3f9faf7102b185624121585d33742d318c1ec6edc3\": container with ID starting with 172f82c33dc9883956f9ee3f9faf7102b185624121585d33742d318c1ec6edc3 not found: ID does not exist" Oct 03 14:40:05 crc kubenswrapper[4861]: I1003 14:40:05.146854 4861 scope.go:117] "RemoveContainer" containerID="19043428d6b064bf619943859ca6ccb16b03e9c1981ed1aa2084516ccb0d5cc2" Oct 03 14:40:05 crc kubenswrapper[4861]: E1003 14:40:05.147088 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"19043428d6b064bf619943859ca6ccb16b03e9c1981ed1aa2084516ccb0d5cc2\": container with ID starting with 19043428d6b064bf619943859ca6ccb16b03e9c1981ed1aa2084516ccb0d5cc2 not found: ID does not exist" containerID="19043428d6b064bf619943859ca6ccb16b03e9c1981ed1aa2084516ccb0d5cc2" Oct 03 14:40:05 crc kubenswrapper[4861]: I1003 14:40:05.147105 4861 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"19043428d6b064bf619943859ca6ccb16b03e9c1981ed1aa2084516ccb0d5cc2"} err="failed to get container status \"19043428d6b064bf619943859ca6ccb16b03e9c1981ed1aa2084516ccb0d5cc2\": rpc error: code = NotFound desc = could not find container \"19043428d6b064bf619943859ca6ccb16b03e9c1981ed1aa2084516ccb0d5cc2\": container with ID starting with 19043428d6b064bf619943859ca6ccb16b03e9c1981ed1aa2084516ccb0d5cc2 not found: ID does not exist" Oct 03 14:40:05 crc kubenswrapper[4861]: I1003 14:40:05.147119 4861 scope.go:117] "RemoveContainer" containerID="bc569356be8a0846ff3a64dce2ba15cbbc2a51987494db418511c60c706e5167" Oct 03 14:40:05 crc kubenswrapper[4861]: E1003 14:40:05.148291 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bc569356be8a0846ff3a64dce2ba15cbbc2a51987494db418511c60c706e5167\": container with ID starting with bc569356be8a0846ff3a64dce2ba15cbbc2a51987494db418511c60c706e5167 not found: ID does not exist" containerID="bc569356be8a0846ff3a64dce2ba15cbbc2a51987494db418511c60c706e5167" Oct 03 14:40:05 crc kubenswrapper[4861]: I1003 14:40:05.148319 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bc569356be8a0846ff3a64dce2ba15cbbc2a51987494db418511c60c706e5167"} err="failed to get container status \"bc569356be8a0846ff3a64dce2ba15cbbc2a51987494db418511c60c706e5167\": rpc error: code = NotFound desc = could not find container \"bc569356be8a0846ff3a64dce2ba15cbbc2a51987494db418511c60c706e5167\": container with ID starting with bc569356be8a0846ff3a64dce2ba15cbbc2a51987494db418511c60c706e5167 not found: ID does not exist" Oct 03 14:40:05 crc kubenswrapper[4861]: I1003 14:40:05.623310 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-56447" Oct 03 14:40:05 crc kubenswrapper[4861]: I1003 14:40:05.623724 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-56447" Oct 03 14:40:06 crc kubenswrapper[4861]: I1003 14:40:06.044795 4861 generic.go:334] "Generic (PLEG): container finished" podID="d4732fde-1728-4190-9572-1601b8cb0db4" containerID="d1d06496da31a1a3884ec7d0e3583ccec11ef7bda995c4d58431e0114cbe3bbf" exitCode=0 Oct 03 14:40:06 crc kubenswrapper[4861]: I1003 14:40:06.044878 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bllq9" event={"ID":"d4732fde-1728-4190-9572-1601b8cb0db4","Type":"ContainerDied","Data":"d1d06496da31a1a3884ec7d0e3583ccec11ef7bda995c4d58431e0114cbe3bbf"} Oct 03 14:40:06 crc kubenswrapper[4861]: I1003 14:40:06.675469 4861 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-marketplace-56447" podUID="8d97e442-df88-4744-841e-ee30d8786a2b" containerName="registry-server" probeResult="failure" output=< Oct 03 14:40:06 crc kubenswrapper[4861]: timeout: failed to connect service ":50051" within 1s Oct 03 14:40:06 crc kubenswrapper[4861]: > Oct 03 14:40:06 crc kubenswrapper[4861]: I1003 14:40:06.698495 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a286eaf4-0929-4027-a5e1-4ed1a363567e" path="/var/lib/kubelet/pods/a286eaf4-0929-4027-a5e1-4ed1a363567e/volumes" Oct 03 14:40:07 crc kubenswrapper[4861]: I1003 14:40:07.061007 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bllq9" 
event={"ID":"d4732fde-1728-4190-9572-1601b8cb0db4","Type":"ContainerStarted","Data":"a4efeae97f0477e2be0263d4e6a2967d130c06a45b804c9f73608d21861d881b"} Oct 03 14:40:07 crc kubenswrapper[4861]: I1003 14:40:07.094292 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-bllq9" podStartSLOduration=3.598120004 podStartE2EDuration="12.094271416s" podCreationTimestamp="2025-10-03 14:39:55 +0000 UTC" firstStartedPulling="2025-10-03 14:39:57.950543674 +0000 UTC m=+4111.948528721" lastFinishedPulling="2025-10-03 14:40:06.446695086 +0000 UTC m=+4120.444680133" observedRunningTime="2025-10-03 14:40:07.085850013 +0000 UTC m=+4121.083835200" watchObservedRunningTime="2025-10-03 14:40:07.094271416 +0000 UTC m=+4121.092256473" Oct 03 14:40:15 crc kubenswrapper[4861]: I1003 14:40:15.687819 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-56447" Oct 03 14:40:15 crc kubenswrapper[4861]: I1003 14:40:15.746544 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-56447" Oct 03 14:40:15 crc kubenswrapper[4861]: I1003 14:40:15.945328 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-56447"] Oct 03 14:40:16 crc kubenswrapper[4861]: I1003 14:40:16.061682 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-bllq9" Oct 03 14:40:16 crc kubenswrapper[4861]: I1003 14:40:16.061758 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-bllq9" Oct 03 14:40:17 crc kubenswrapper[4861]: I1003 14:40:17.115131 4861 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-bllq9" podUID="d4732fde-1728-4190-9572-1601b8cb0db4" containerName="registry-server" probeResult="failure" output=< Oct 03 14:40:17 crc kubenswrapper[4861]: timeout: failed to connect service ":50051" within 1s Oct 03 14:40:17 crc kubenswrapper[4861]: > Oct 03 14:40:17 crc kubenswrapper[4861]: I1003 14:40:17.151438 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-56447" podUID="8d97e442-df88-4744-841e-ee30d8786a2b" containerName="registry-server" containerID="cri-o://8710317aa5d4fde2c50e687b1e62072eb6927d236211b1fda760e4afde0d1111" gracePeriod=2 Oct 03 14:40:17 crc kubenswrapper[4861]: I1003 14:40:17.740774 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-56447" Oct 03 14:40:17 crc kubenswrapper[4861]: I1003 14:40:17.904096 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-94xs7\" (UniqueName: \"kubernetes.io/projected/8d97e442-df88-4744-841e-ee30d8786a2b-kube-api-access-94xs7\") pod \"8d97e442-df88-4744-841e-ee30d8786a2b\" (UID: \"8d97e442-df88-4744-841e-ee30d8786a2b\") " Oct 03 14:40:17 crc kubenswrapper[4861]: I1003 14:40:17.905215 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8d97e442-df88-4744-841e-ee30d8786a2b-catalog-content\") pod \"8d97e442-df88-4744-841e-ee30d8786a2b\" (UID: \"8d97e442-df88-4744-841e-ee30d8786a2b\") " Oct 03 14:40:17 crc kubenswrapper[4861]: I1003 14:40:17.905294 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8d97e442-df88-4744-841e-ee30d8786a2b-utilities\") pod \"8d97e442-df88-4744-841e-ee30d8786a2b\" (UID: \"8d97e442-df88-4744-841e-ee30d8786a2b\") " Oct 03 14:40:17 crc kubenswrapper[4861]: I1003 14:40:17.905970 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8d97e442-df88-4744-841e-ee30d8786a2b-utilities" (OuterVolumeSpecName: "utilities") pod "8d97e442-df88-4744-841e-ee30d8786a2b" (UID: "8d97e442-df88-4744-841e-ee30d8786a2b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 14:40:17 crc kubenswrapper[4861]: I1003 14:40:17.909927 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8d97e442-df88-4744-841e-ee30d8786a2b-kube-api-access-94xs7" (OuterVolumeSpecName: "kube-api-access-94xs7") pod "8d97e442-df88-4744-841e-ee30d8786a2b" (UID: "8d97e442-df88-4744-841e-ee30d8786a2b"). InnerVolumeSpecName "kube-api-access-94xs7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 14:40:17 crc kubenswrapper[4861]: I1003 14:40:17.917369 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8d97e442-df88-4744-841e-ee30d8786a2b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8d97e442-df88-4744-841e-ee30d8786a2b" (UID: "8d97e442-df88-4744-841e-ee30d8786a2b"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 14:40:18 crc kubenswrapper[4861]: I1003 14:40:18.007978 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-94xs7\" (UniqueName: \"kubernetes.io/projected/8d97e442-df88-4744-841e-ee30d8786a2b-kube-api-access-94xs7\") on node \"crc\" DevicePath \"\"" Oct 03 14:40:18 crc kubenswrapper[4861]: I1003 14:40:18.008016 4861 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8d97e442-df88-4744-841e-ee30d8786a2b-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 14:40:18 crc kubenswrapper[4861]: I1003 14:40:18.008025 4861 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8d97e442-df88-4744-841e-ee30d8786a2b-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 14:40:18 crc kubenswrapper[4861]: I1003 14:40:18.165387 4861 generic.go:334] "Generic (PLEG): container finished" podID="8d97e442-df88-4744-841e-ee30d8786a2b" containerID="8710317aa5d4fde2c50e687b1e62072eb6927d236211b1fda760e4afde0d1111" exitCode=0 Oct 03 14:40:18 crc kubenswrapper[4861]: I1003 14:40:18.165479 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-56447" Oct 03 14:40:18 crc kubenswrapper[4861]: I1003 14:40:18.165479 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-56447" event={"ID":"8d97e442-df88-4744-841e-ee30d8786a2b","Type":"ContainerDied","Data":"8710317aa5d4fde2c50e687b1e62072eb6927d236211b1fda760e4afde0d1111"} Oct 03 14:40:18 crc kubenswrapper[4861]: I1003 14:40:18.165621 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-56447" event={"ID":"8d97e442-df88-4744-841e-ee30d8786a2b","Type":"ContainerDied","Data":"544193b9273312bd049c6f24e7d37ef10912f57012b786ce7866c2640492d1b9"} Oct 03 14:40:18 crc kubenswrapper[4861]: I1003 14:40:18.165650 4861 scope.go:117] "RemoveContainer" containerID="8710317aa5d4fde2c50e687b1e62072eb6927d236211b1fda760e4afde0d1111" Oct 03 14:40:18 crc kubenswrapper[4861]: I1003 14:40:18.209839 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-56447"] Oct 03 14:40:18 crc kubenswrapper[4861]: I1003 14:40:18.216536 4861 scope.go:117] "RemoveContainer" containerID="8fdaf01d2566909bdc3467e207ff93be7e11aedeec7f64d2e6855e8df29dad7d" Oct 03 14:40:18 crc kubenswrapper[4861]: I1003 14:40:18.219648 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-56447"] Oct 03 14:40:18 crc kubenswrapper[4861]: I1003 14:40:18.244431 4861 scope.go:117] "RemoveContainer" containerID="f2f97886cc4747c053579409aeeef2fe330420119bbe3c2daa323d25660a7be6" Oct 03 14:40:18 crc kubenswrapper[4861]: I1003 14:40:18.307269 4861 scope.go:117] "RemoveContainer" containerID="8710317aa5d4fde2c50e687b1e62072eb6927d236211b1fda760e4afde0d1111" Oct 03 14:40:18 crc kubenswrapper[4861]: E1003 14:40:18.308130 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8710317aa5d4fde2c50e687b1e62072eb6927d236211b1fda760e4afde0d1111\": container with ID starting with 8710317aa5d4fde2c50e687b1e62072eb6927d236211b1fda760e4afde0d1111 not found: ID does not exist" containerID="8710317aa5d4fde2c50e687b1e62072eb6927d236211b1fda760e4afde0d1111" Oct 03 14:40:18 crc kubenswrapper[4861]: I1003 14:40:18.308158 4861 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8710317aa5d4fde2c50e687b1e62072eb6927d236211b1fda760e4afde0d1111"} err="failed to get container status \"8710317aa5d4fde2c50e687b1e62072eb6927d236211b1fda760e4afde0d1111\": rpc error: code = NotFound desc = could not find container \"8710317aa5d4fde2c50e687b1e62072eb6927d236211b1fda760e4afde0d1111\": container with ID starting with 8710317aa5d4fde2c50e687b1e62072eb6927d236211b1fda760e4afde0d1111 not found: ID does not exist" Oct 03 14:40:18 crc kubenswrapper[4861]: I1003 14:40:18.308180 4861 scope.go:117] "RemoveContainer" containerID="8fdaf01d2566909bdc3467e207ff93be7e11aedeec7f64d2e6855e8df29dad7d" Oct 03 14:40:18 crc kubenswrapper[4861]: E1003 14:40:18.308449 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8fdaf01d2566909bdc3467e207ff93be7e11aedeec7f64d2e6855e8df29dad7d\": container with ID starting with 8fdaf01d2566909bdc3467e207ff93be7e11aedeec7f64d2e6855e8df29dad7d not found: ID does not exist" containerID="8fdaf01d2566909bdc3467e207ff93be7e11aedeec7f64d2e6855e8df29dad7d" Oct 03 14:40:18 crc kubenswrapper[4861]: I1003 14:40:18.308475 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8fdaf01d2566909bdc3467e207ff93be7e11aedeec7f64d2e6855e8df29dad7d"} err="failed to get container status \"8fdaf01d2566909bdc3467e207ff93be7e11aedeec7f64d2e6855e8df29dad7d\": rpc error: code = NotFound desc = could not find container \"8fdaf01d2566909bdc3467e207ff93be7e11aedeec7f64d2e6855e8df29dad7d\": container with ID starting with 8fdaf01d2566909bdc3467e207ff93be7e11aedeec7f64d2e6855e8df29dad7d not found: ID does not exist" Oct 03 14:40:18 crc kubenswrapper[4861]: I1003 14:40:18.308489 4861 scope.go:117] "RemoveContainer" containerID="f2f97886cc4747c053579409aeeef2fe330420119bbe3c2daa323d25660a7be6" Oct 03 14:40:18 crc kubenswrapper[4861]: E1003 14:40:18.308796 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f2f97886cc4747c053579409aeeef2fe330420119bbe3c2daa323d25660a7be6\": container with ID starting with f2f97886cc4747c053579409aeeef2fe330420119bbe3c2daa323d25660a7be6 not found: ID does not exist" containerID="f2f97886cc4747c053579409aeeef2fe330420119bbe3c2daa323d25660a7be6" Oct 03 14:40:18 crc kubenswrapper[4861]: I1003 14:40:18.308850 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f2f97886cc4747c053579409aeeef2fe330420119bbe3c2daa323d25660a7be6"} err="failed to get container status \"f2f97886cc4747c053579409aeeef2fe330420119bbe3c2daa323d25660a7be6\": rpc error: code = NotFound desc = could not find container \"f2f97886cc4747c053579409aeeef2fe330420119bbe3c2daa323d25660a7be6\": container with ID starting with f2f97886cc4747c053579409aeeef2fe330420119bbe3c2daa323d25660a7be6 not found: ID does not exist" Oct 03 14:40:18 crc kubenswrapper[4861]: I1003 14:40:18.695187 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8d97e442-df88-4744-841e-ee30d8786a2b" path="/var/lib/kubelet/pods/8d97e442-df88-4744-841e-ee30d8786a2b/volumes" Oct 03 14:40:27 crc kubenswrapper[4861]: I1003 14:40:27.107381 4861 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-bllq9" podUID="d4732fde-1728-4190-9572-1601b8cb0db4" containerName="registry-server" probeResult="failure" output=< Oct 03 14:40:27 crc 
kubenswrapper[4861]: timeout: failed to connect service ":50051" within 1s Oct 03 14:40:27 crc kubenswrapper[4861]: > Oct 03 14:40:36 crc kubenswrapper[4861]: I1003 14:40:36.100494 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-bllq9" Oct 03 14:40:36 crc kubenswrapper[4861]: I1003 14:40:36.147942 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-bllq9" Oct 03 14:40:36 crc kubenswrapper[4861]: I1003 14:40:36.342835 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-bllq9"] Oct 03 14:40:37 crc kubenswrapper[4861]: I1003 14:40:37.362946 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-bllq9" podUID="d4732fde-1728-4190-9572-1601b8cb0db4" containerName="registry-server" containerID="cri-o://a4efeae97f0477e2be0263d4e6a2967d130c06a45b804c9f73608d21861d881b" gracePeriod=2 Oct 03 14:40:38 crc kubenswrapper[4861]: I1003 14:40:38.030919 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-bllq9" Oct 03 14:40:38 crc kubenswrapper[4861]: I1003 14:40:38.129869 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d4732fde-1728-4190-9572-1601b8cb0db4-utilities\") pod \"d4732fde-1728-4190-9572-1601b8cb0db4\" (UID: \"d4732fde-1728-4190-9572-1601b8cb0db4\") " Oct 03 14:40:38 crc kubenswrapper[4861]: I1003 14:40:38.129989 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htt6n\" (UniqueName: \"kubernetes.io/projected/d4732fde-1728-4190-9572-1601b8cb0db4-kube-api-access-htt6n\") pod \"d4732fde-1728-4190-9572-1601b8cb0db4\" (UID: \"d4732fde-1728-4190-9572-1601b8cb0db4\") " Oct 03 14:40:38 crc kubenswrapper[4861]: I1003 14:40:38.130105 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d4732fde-1728-4190-9572-1601b8cb0db4-catalog-content\") pod \"d4732fde-1728-4190-9572-1601b8cb0db4\" (UID: \"d4732fde-1728-4190-9572-1601b8cb0db4\") " Oct 03 14:40:38 crc kubenswrapper[4861]: I1003 14:40:38.130771 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d4732fde-1728-4190-9572-1601b8cb0db4-utilities" (OuterVolumeSpecName: "utilities") pod "d4732fde-1728-4190-9572-1601b8cb0db4" (UID: "d4732fde-1728-4190-9572-1601b8cb0db4"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 14:40:38 crc kubenswrapper[4861]: I1003 14:40:38.151443 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d4732fde-1728-4190-9572-1601b8cb0db4-kube-api-access-htt6n" (OuterVolumeSpecName: "kube-api-access-htt6n") pod "d4732fde-1728-4190-9572-1601b8cb0db4" (UID: "d4732fde-1728-4190-9572-1601b8cb0db4"). InnerVolumeSpecName "kube-api-access-htt6n". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 14:40:38 crc kubenswrapper[4861]: I1003 14:40:38.232828 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d4732fde-1728-4190-9572-1601b8cb0db4-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d4732fde-1728-4190-9572-1601b8cb0db4" (UID: "d4732fde-1728-4190-9572-1601b8cb0db4"). 
InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 14:40:38 crc kubenswrapper[4861]: I1003 14:40:38.233955 4861 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d4732fde-1728-4190-9572-1601b8cb0db4-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 14:40:38 crc kubenswrapper[4861]: I1003 14:40:38.234049 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htt6n\" (UniqueName: \"kubernetes.io/projected/d4732fde-1728-4190-9572-1601b8cb0db4-kube-api-access-htt6n\") on node \"crc\" DevicePath \"\"" Oct 03 14:40:38 crc kubenswrapper[4861]: I1003 14:40:38.234135 4861 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d4732fde-1728-4190-9572-1601b8cb0db4-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 14:40:38 crc kubenswrapper[4861]: I1003 14:40:38.372735 4861 generic.go:334] "Generic (PLEG): container finished" podID="d4732fde-1728-4190-9572-1601b8cb0db4" containerID="a4efeae97f0477e2be0263d4e6a2967d130c06a45b804c9f73608d21861d881b" exitCode=0 Oct 03 14:40:38 crc kubenswrapper[4861]: I1003 14:40:38.372921 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bllq9" event={"ID":"d4732fde-1728-4190-9572-1601b8cb0db4","Type":"ContainerDied","Data":"a4efeae97f0477e2be0263d4e6a2967d130c06a45b804c9f73608d21861d881b"} Oct 03 14:40:38 crc kubenswrapper[4861]: I1003 14:40:38.373462 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bllq9" event={"ID":"d4732fde-1728-4190-9572-1601b8cb0db4","Type":"ContainerDied","Data":"0329637347da10a4a4b70bf5842fc92d114a811a82345fb2160566fee95a9d4b"} Oct 03 14:40:38 crc kubenswrapper[4861]: I1003 14:40:38.373535 4861 scope.go:117] "RemoveContainer" containerID="a4efeae97f0477e2be0263d4e6a2967d130c06a45b804c9f73608d21861d881b" Oct 03 14:40:38 crc kubenswrapper[4861]: I1003 14:40:38.373033 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-bllq9" Oct 03 14:40:38 crc kubenswrapper[4861]: I1003 14:40:38.403569 4861 scope.go:117] "RemoveContainer" containerID="d1d06496da31a1a3884ec7d0e3583ccec11ef7bda995c4d58431e0114cbe3bbf" Oct 03 14:40:38 crc kubenswrapper[4861]: I1003 14:40:38.422362 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-bllq9"] Oct 03 14:40:38 crc kubenswrapper[4861]: I1003 14:40:38.432740 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-bllq9"] Oct 03 14:40:38 crc kubenswrapper[4861]: I1003 14:40:38.438101 4861 scope.go:117] "RemoveContainer" containerID="738004f5fe979af05c0431df6f1c16b6902662d0b67ad79be1017ac3eb21b354" Oct 03 14:40:38 crc kubenswrapper[4861]: I1003 14:40:38.484957 4861 scope.go:117] "RemoveContainer" containerID="a4efeae97f0477e2be0263d4e6a2967d130c06a45b804c9f73608d21861d881b" Oct 03 14:40:38 crc kubenswrapper[4861]: E1003 14:40:38.485676 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a4efeae97f0477e2be0263d4e6a2967d130c06a45b804c9f73608d21861d881b\": container with ID starting with a4efeae97f0477e2be0263d4e6a2967d130c06a45b804c9f73608d21861d881b not found: ID does not exist" containerID="a4efeae97f0477e2be0263d4e6a2967d130c06a45b804c9f73608d21861d881b" Oct 03 14:40:38 crc kubenswrapper[4861]: I1003 14:40:38.485729 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a4efeae97f0477e2be0263d4e6a2967d130c06a45b804c9f73608d21861d881b"} err="failed to get container status \"a4efeae97f0477e2be0263d4e6a2967d130c06a45b804c9f73608d21861d881b\": rpc error: code = NotFound desc = could not find container \"a4efeae97f0477e2be0263d4e6a2967d130c06a45b804c9f73608d21861d881b\": container with ID starting with a4efeae97f0477e2be0263d4e6a2967d130c06a45b804c9f73608d21861d881b not found: ID does not exist" Oct 03 14:40:38 crc kubenswrapper[4861]: I1003 14:40:38.485758 4861 scope.go:117] "RemoveContainer" containerID="d1d06496da31a1a3884ec7d0e3583ccec11ef7bda995c4d58431e0114cbe3bbf" Oct 03 14:40:38 crc kubenswrapper[4861]: E1003 14:40:38.487305 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d1d06496da31a1a3884ec7d0e3583ccec11ef7bda995c4d58431e0114cbe3bbf\": container with ID starting with d1d06496da31a1a3884ec7d0e3583ccec11ef7bda995c4d58431e0114cbe3bbf not found: ID does not exist" containerID="d1d06496da31a1a3884ec7d0e3583ccec11ef7bda995c4d58431e0114cbe3bbf" Oct 03 14:40:38 crc kubenswrapper[4861]: I1003 14:40:38.487427 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d1d06496da31a1a3884ec7d0e3583ccec11ef7bda995c4d58431e0114cbe3bbf"} err="failed to get container status \"d1d06496da31a1a3884ec7d0e3583ccec11ef7bda995c4d58431e0114cbe3bbf\": rpc error: code = NotFound desc = could not find container \"d1d06496da31a1a3884ec7d0e3583ccec11ef7bda995c4d58431e0114cbe3bbf\": container with ID starting with d1d06496da31a1a3884ec7d0e3583ccec11ef7bda995c4d58431e0114cbe3bbf not found: ID does not exist" Oct 03 14:40:38 crc kubenswrapper[4861]: I1003 14:40:38.487535 4861 scope.go:117] "RemoveContainer" containerID="738004f5fe979af05c0431df6f1c16b6902662d0b67ad79be1017ac3eb21b354" Oct 03 14:40:38 crc kubenswrapper[4861]: E1003 14:40:38.488799 4861 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"738004f5fe979af05c0431df6f1c16b6902662d0b67ad79be1017ac3eb21b354\": container with ID starting with 738004f5fe979af05c0431df6f1c16b6902662d0b67ad79be1017ac3eb21b354 not found: ID does not exist" containerID="738004f5fe979af05c0431df6f1c16b6902662d0b67ad79be1017ac3eb21b354" Oct 03 14:40:38 crc kubenswrapper[4861]: I1003 14:40:38.488883 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"738004f5fe979af05c0431df6f1c16b6902662d0b67ad79be1017ac3eb21b354"} err="failed to get container status \"738004f5fe979af05c0431df6f1c16b6902662d0b67ad79be1017ac3eb21b354\": rpc error: code = NotFound desc = could not find container \"738004f5fe979af05c0431df6f1c16b6902662d0b67ad79be1017ac3eb21b354\": container with ID starting with 738004f5fe979af05c0431df6f1c16b6902662d0b67ad79be1017ac3eb21b354 not found: ID does not exist" Oct 03 14:40:38 crc kubenswrapper[4861]: I1003 14:40:38.693715 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d4732fde-1728-4190-9572-1601b8cb0db4" path="/var/lib/kubelet/pods/d4732fde-1728-4190-9572-1601b8cb0db4/volumes" Oct 03 14:41:30 crc kubenswrapper[4861]: I1003 14:41:30.145216 4861 patch_prober.go:28] interesting pod/machine-config-daemon-t9slw container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 14:41:30 crc kubenswrapper[4861]: I1003 14:41:30.145756 4861 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 14:42:00 crc kubenswrapper[4861]: I1003 14:42:00.144894 4861 patch_prober.go:28] interesting pod/machine-config-daemon-t9slw container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 14:42:00 crc kubenswrapper[4861]: I1003 14:42:00.145547 4861 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 14:42:30 crc kubenswrapper[4861]: I1003 14:42:30.144698 4861 patch_prober.go:28] interesting pod/machine-config-daemon-t9slw container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 14:42:30 crc kubenswrapper[4861]: I1003 14:42:30.145064 4861 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 14:42:30 crc kubenswrapper[4861]: I1003 14:42:30.145099 4861 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" 
status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" Oct 03 14:42:30 crc kubenswrapper[4861]: I1003 14:42:30.145781 4861 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"9cc426a24e7af0ab4a5086c70c2d2103d9a0bf02f0906c97e1bde93e1a0d2c12"} pod="openshift-machine-config-operator/machine-config-daemon-t9slw" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 03 14:42:30 crc kubenswrapper[4861]: I1003 14:42:30.145843 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" containerName="machine-config-daemon" containerID="cri-o://9cc426a24e7af0ab4a5086c70c2d2103d9a0bf02f0906c97e1bde93e1a0d2c12" gracePeriod=600 Oct 03 14:42:30 crc kubenswrapper[4861]: E1003 14:42:30.421753 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 14:42:31 crc kubenswrapper[4861]: I1003 14:42:31.370880 4861 generic.go:334] "Generic (PLEG): container finished" podID="d8335d3f-417e-4114-b306-a3d8f6c31348" containerID="9cc426a24e7af0ab4a5086c70c2d2103d9a0bf02f0906c97e1bde93e1a0d2c12" exitCode=0 Oct 03 14:42:31 crc kubenswrapper[4861]: I1003 14:42:31.370942 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" event={"ID":"d8335d3f-417e-4114-b306-a3d8f6c31348","Type":"ContainerDied","Data":"9cc426a24e7af0ab4a5086c70c2d2103d9a0bf02f0906c97e1bde93e1a0d2c12"} Oct 03 14:42:31 crc kubenswrapper[4861]: I1003 14:42:31.370991 4861 scope.go:117] "RemoveContainer" containerID="a3b0b75cb1e4914beee2778fba1a3910a480a03a5aecf09b3439a87cfc51f8c0" Oct 03 14:42:31 crc kubenswrapper[4861]: I1003 14:42:31.371984 4861 scope.go:117] "RemoveContainer" containerID="9cc426a24e7af0ab4a5086c70c2d2103d9a0bf02f0906c97e1bde93e1a0d2c12" Oct 03 14:42:31 crc kubenswrapper[4861]: E1003 14:42:31.372962 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 14:42:41 crc kubenswrapper[4861]: I1003 14:42:41.682106 4861 scope.go:117] "RemoveContainer" containerID="9cc426a24e7af0ab4a5086c70c2d2103d9a0bf02f0906c97e1bde93e1a0d2c12" Oct 03 14:42:41 crc kubenswrapper[4861]: E1003 14:42:41.683975 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 14:42:55 crc 
kubenswrapper[4861]: I1003 14:42:55.681660 4861 scope.go:117] "RemoveContainer" containerID="9cc426a24e7af0ab4a5086c70c2d2103d9a0bf02f0906c97e1bde93e1a0d2c12" Oct 03 14:42:55 crc kubenswrapper[4861]: E1003 14:42:55.683736 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 14:43:07 crc kubenswrapper[4861]: I1003 14:43:07.680966 4861 scope.go:117] "RemoveContainer" containerID="9cc426a24e7af0ab4a5086c70c2d2103d9a0bf02f0906c97e1bde93e1a0d2c12" Oct 03 14:43:07 crc kubenswrapper[4861]: E1003 14:43:07.682944 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 14:43:20 crc kubenswrapper[4861]: I1003 14:43:20.681057 4861 scope.go:117] "RemoveContainer" containerID="9cc426a24e7af0ab4a5086c70c2d2103d9a0bf02f0906c97e1bde93e1a0d2c12" Oct 03 14:43:20 crc kubenswrapper[4861]: E1003 14:43:20.681974 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 14:43:35 crc kubenswrapper[4861]: I1003 14:43:35.682401 4861 scope.go:117] "RemoveContainer" containerID="9cc426a24e7af0ab4a5086c70c2d2103d9a0bf02f0906c97e1bde93e1a0d2c12" Oct 03 14:43:35 crc kubenswrapper[4861]: E1003 14:43:35.683953 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 14:43:49 crc kubenswrapper[4861]: I1003 14:43:49.681432 4861 scope.go:117] "RemoveContainer" containerID="9cc426a24e7af0ab4a5086c70c2d2103d9a0bf02f0906c97e1bde93e1a0d2c12" Oct 03 14:43:49 crc kubenswrapper[4861]: E1003 14:43:49.682462 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 14:44:02 crc kubenswrapper[4861]: I1003 14:44:02.681273 4861 scope.go:117] "RemoveContainer" containerID="9cc426a24e7af0ab4a5086c70c2d2103d9a0bf02f0906c97e1bde93e1a0d2c12" Oct 03 14:44:02 crc 
kubenswrapper[4861]: E1003 14:44:02.681895 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 14:44:15 crc kubenswrapper[4861]: I1003 14:44:15.681877 4861 scope.go:117] "RemoveContainer" containerID="9cc426a24e7af0ab4a5086c70c2d2103d9a0bf02f0906c97e1bde93e1a0d2c12" Oct 03 14:44:15 crc kubenswrapper[4861]: E1003 14:44:15.682768 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 14:44:30 crc kubenswrapper[4861]: I1003 14:44:30.681046 4861 scope.go:117] "RemoveContainer" containerID="9cc426a24e7af0ab4a5086c70c2d2103d9a0bf02f0906c97e1bde93e1a0d2c12" Oct 03 14:44:30 crc kubenswrapper[4861]: E1003 14:44:30.681780 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 14:44:41 crc kubenswrapper[4861]: I1003 14:44:41.681295 4861 scope.go:117] "RemoveContainer" containerID="9cc426a24e7af0ab4a5086c70c2d2103d9a0bf02f0906c97e1bde93e1a0d2c12" Oct 03 14:44:41 crc kubenswrapper[4861]: E1003 14:44:41.682564 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 14:44:52 crc kubenswrapper[4861]: I1003 14:44:52.681719 4861 scope.go:117] "RemoveContainer" containerID="9cc426a24e7af0ab4a5086c70c2d2103d9a0bf02f0906c97e1bde93e1a0d2c12" Oct 03 14:44:52 crc kubenswrapper[4861]: E1003 14:44:52.682854 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 14:45:00 crc kubenswrapper[4861]: I1003 14:45:00.148199 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29325045-r5b8j"] Oct 03 14:45:00 crc kubenswrapper[4861]: E1003 14:45:00.149244 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4732fde-1728-4190-9572-1601b8cb0db4" 
containerName="registry-server" Oct 03 14:45:00 crc kubenswrapper[4861]: I1003 14:45:00.149266 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4732fde-1728-4190-9572-1601b8cb0db4" containerName="registry-server" Oct 03 14:45:00 crc kubenswrapper[4861]: E1003 14:45:00.149289 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4732fde-1728-4190-9572-1601b8cb0db4" containerName="extract-content" Oct 03 14:45:00 crc kubenswrapper[4861]: I1003 14:45:00.149300 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4732fde-1728-4190-9572-1601b8cb0db4" containerName="extract-content" Oct 03 14:45:00 crc kubenswrapper[4861]: E1003 14:45:00.149312 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a286eaf4-0929-4027-a5e1-4ed1a363567e" containerName="extract-utilities" Oct 03 14:45:00 crc kubenswrapper[4861]: I1003 14:45:00.149319 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="a286eaf4-0929-4027-a5e1-4ed1a363567e" containerName="extract-utilities" Oct 03 14:45:00 crc kubenswrapper[4861]: E1003 14:45:00.149333 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8d97e442-df88-4744-841e-ee30d8786a2b" containerName="extract-utilities" Oct 03 14:45:00 crc kubenswrapper[4861]: I1003 14:45:00.149341 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="8d97e442-df88-4744-841e-ee30d8786a2b" containerName="extract-utilities" Oct 03 14:45:00 crc kubenswrapper[4861]: E1003 14:45:00.149348 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8d97e442-df88-4744-841e-ee30d8786a2b" containerName="registry-server" Oct 03 14:45:00 crc kubenswrapper[4861]: I1003 14:45:00.149356 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="8d97e442-df88-4744-841e-ee30d8786a2b" containerName="registry-server" Oct 03 14:45:00 crc kubenswrapper[4861]: E1003 14:45:00.149372 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a286eaf4-0929-4027-a5e1-4ed1a363567e" containerName="extract-content" Oct 03 14:45:00 crc kubenswrapper[4861]: I1003 14:45:00.149379 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="a286eaf4-0929-4027-a5e1-4ed1a363567e" containerName="extract-content" Oct 03 14:45:00 crc kubenswrapper[4861]: E1003 14:45:00.149399 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a286eaf4-0929-4027-a5e1-4ed1a363567e" containerName="registry-server" Oct 03 14:45:00 crc kubenswrapper[4861]: I1003 14:45:00.149407 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="a286eaf4-0929-4027-a5e1-4ed1a363567e" containerName="registry-server" Oct 03 14:45:00 crc kubenswrapper[4861]: E1003 14:45:00.149421 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8d97e442-df88-4744-841e-ee30d8786a2b" containerName="extract-content" Oct 03 14:45:00 crc kubenswrapper[4861]: I1003 14:45:00.149428 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="8d97e442-df88-4744-841e-ee30d8786a2b" containerName="extract-content" Oct 03 14:45:00 crc kubenswrapper[4861]: E1003 14:45:00.149453 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4732fde-1728-4190-9572-1601b8cb0db4" containerName="extract-utilities" Oct 03 14:45:00 crc kubenswrapper[4861]: I1003 14:45:00.149461 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4732fde-1728-4190-9572-1601b8cb0db4" containerName="extract-utilities" Oct 03 14:45:00 crc kubenswrapper[4861]: I1003 14:45:00.149713 4861 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="a286eaf4-0929-4027-a5e1-4ed1a363567e" containerName="registry-server" Oct 03 14:45:00 crc kubenswrapper[4861]: I1003 14:45:00.149735 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="8d97e442-df88-4744-841e-ee30d8786a2b" containerName="registry-server" Oct 03 14:45:00 crc kubenswrapper[4861]: I1003 14:45:00.149748 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="d4732fde-1728-4190-9572-1601b8cb0db4" containerName="registry-server" Oct 03 14:45:00 crc kubenswrapper[4861]: I1003 14:45:00.150502 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29325045-r5b8j" Oct 03 14:45:00 crc kubenswrapper[4861]: I1003 14:45:00.153530 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 03 14:45:00 crc kubenswrapper[4861]: I1003 14:45:00.153868 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 03 14:45:00 crc kubenswrapper[4861]: I1003 14:45:00.163295 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29325045-r5b8j"] Oct 03 14:45:00 crc kubenswrapper[4861]: I1003 14:45:00.169989 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/fd93e625-16d6-49a2-b8cc-08381a234ac6-config-volume\") pod \"collect-profiles-29325045-r5b8j\" (UID: \"fd93e625-16d6-49a2-b8cc-08381a234ac6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325045-r5b8j" Oct 03 14:45:00 crc kubenswrapper[4861]: I1003 14:45:00.170364 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/fd93e625-16d6-49a2-b8cc-08381a234ac6-secret-volume\") pod \"collect-profiles-29325045-r5b8j\" (UID: \"fd93e625-16d6-49a2-b8cc-08381a234ac6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325045-r5b8j" Oct 03 14:45:00 crc kubenswrapper[4861]: I1003 14:45:00.170485 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rc5jt\" (UniqueName: \"kubernetes.io/projected/fd93e625-16d6-49a2-b8cc-08381a234ac6-kube-api-access-rc5jt\") pod \"collect-profiles-29325045-r5b8j\" (UID: \"fd93e625-16d6-49a2-b8cc-08381a234ac6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325045-r5b8j" Oct 03 14:45:00 crc kubenswrapper[4861]: I1003 14:45:00.272937 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/fd93e625-16d6-49a2-b8cc-08381a234ac6-config-volume\") pod \"collect-profiles-29325045-r5b8j\" (UID: \"fd93e625-16d6-49a2-b8cc-08381a234ac6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325045-r5b8j" Oct 03 14:45:00 crc kubenswrapper[4861]: I1003 14:45:00.273031 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/fd93e625-16d6-49a2-b8cc-08381a234ac6-secret-volume\") pod \"collect-profiles-29325045-r5b8j\" (UID: \"fd93e625-16d6-49a2-b8cc-08381a234ac6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325045-r5b8j" Oct 03 14:45:00 crc kubenswrapper[4861]: I1003 14:45:00.273074 4861 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-rc5jt\" (UniqueName: \"kubernetes.io/projected/fd93e625-16d6-49a2-b8cc-08381a234ac6-kube-api-access-rc5jt\") pod \"collect-profiles-29325045-r5b8j\" (UID: \"fd93e625-16d6-49a2-b8cc-08381a234ac6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325045-r5b8j" Oct 03 14:45:00 crc kubenswrapper[4861]: I1003 14:45:00.273924 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/fd93e625-16d6-49a2-b8cc-08381a234ac6-config-volume\") pod \"collect-profiles-29325045-r5b8j\" (UID: \"fd93e625-16d6-49a2-b8cc-08381a234ac6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325045-r5b8j" Oct 03 14:45:00 crc kubenswrapper[4861]: I1003 14:45:00.278939 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/fd93e625-16d6-49a2-b8cc-08381a234ac6-secret-volume\") pod \"collect-profiles-29325045-r5b8j\" (UID: \"fd93e625-16d6-49a2-b8cc-08381a234ac6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325045-r5b8j" Oct 03 14:45:00 crc kubenswrapper[4861]: I1003 14:45:00.294058 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rc5jt\" (UniqueName: \"kubernetes.io/projected/fd93e625-16d6-49a2-b8cc-08381a234ac6-kube-api-access-rc5jt\") pod \"collect-profiles-29325045-r5b8j\" (UID: \"fd93e625-16d6-49a2-b8cc-08381a234ac6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325045-r5b8j" Oct 03 14:45:00 crc kubenswrapper[4861]: I1003 14:45:00.483642 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29325045-r5b8j" Oct 03 14:45:01 crc kubenswrapper[4861]: I1003 14:45:01.071516 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29325045-r5b8j"] Oct 03 14:45:01 crc kubenswrapper[4861]: I1003 14:45:01.713734 4861 generic.go:334] "Generic (PLEG): container finished" podID="fd93e625-16d6-49a2-b8cc-08381a234ac6" containerID="57ee02128034b873dbea2cd472fc295d1bea30209c715e72c947cd29bf1f6e09" exitCode=0 Oct 03 14:45:01 crc kubenswrapper[4861]: I1003 14:45:01.714097 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29325045-r5b8j" event={"ID":"fd93e625-16d6-49a2-b8cc-08381a234ac6","Type":"ContainerDied","Data":"57ee02128034b873dbea2cd472fc295d1bea30209c715e72c947cd29bf1f6e09"} Oct 03 14:45:01 crc kubenswrapper[4861]: I1003 14:45:01.714126 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29325045-r5b8j" event={"ID":"fd93e625-16d6-49a2-b8cc-08381a234ac6","Type":"ContainerStarted","Data":"42608b8a4d23cd61bf929e6fa52b90cbbf1bc2f3a37a36c0502e1461b670f081"} Oct 03 14:45:03 crc kubenswrapper[4861]: I1003 14:45:03.244893 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29325045-r5b8j" Oct 03 14:45:03 crc kubenswrapper[4861]: I1003 14:45:03.436647 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/fd93e625-16d6-49a2-b8cc-08381a234ac6-config-volume\") pod \"fd93e625-16d6-49a2-b8cc-08381a234ac6\" (UID: \"fd93e625-16d6-49a2-b8cc-08381a234ac6\") " Oct 03 14:45:03 crc kubenswrapper[4861]: I1003 14:45:03.436736 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/fd93e625-16d6-49a2-b8cc-08381a234ac6-secret-volume\") pod \"fd93e625-16d6-49a2-b8cc-08381a234ac6\" (UID: \"fd93e625-16d6-49a2-b8cc-08381a234ac6\") " Oct 03 14:45:03 crc kubenswrapper[4861]: I1003 14:45:03.437010 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rc5jt\" (UniqueName: \"kubernetes.io/projected/fd93e625-16d6-49a2-b8cc-08381a234ac6-kube-api-access-rc5jt\") pod \"fd93e625-16d6-49a2-b8cc-08381a234ac6\" (UID: \"fd93e625-16d6-49a2-b8cc-08381a234ac6\") " Oct 03 14:45:03 crc kubenswrapper[4861]: I1003 14:45:03.437540 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fd93e625-16d6-49a2-b8cc-08381a234ac6-config-volume" (OuterVolumeSpecName: "config-volume") pod "fd93e625-16d6-49a2-b8cc-08381a234ac6" (UID: "fd93e625-16d6-49a2-b8cc-08381a234ac6"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 14:45:03 crc kubenswrapper[4861]: I1003 14:45:03.437828 4861 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/fd93e625-16d6-49a2-b8cc-08381a234ac6-config-volume\") on node \"crc\" DevicePath \"\"" Oct 03 14:45:03 crc kubenswrapper[4861]: I1003 14:45:03.447298 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fd93e625-16d6-49a2-b8cc-08381a234ac6-kube-api-access-rc5jt" (OuterVolumeSpecName: "kube-api-access-rc5jt") pod "fd93e625-16d6-49a2-b8cc-08381a234ac6" (UID: "fd93e625-16d6-49a2-b8cc-08381a234ac6"). InnerVolumeSpecName "kube-api-access-rc5jt". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 14:45:03 crc kubenswrapper[4861]: I1003 14:45:03.449465 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fd93e625-16d6-49a2-b8cc-08381a234ac6-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "fd93e625-16d6-49a2-b8cc-08381a234ac6" (UID: "fd93e625-16d6-49a2-b8cc-08381a234ac6"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 14:45:03 crc kubenswrapper[4861]: I1003 14:45:03.539536 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rc5jt\" (UniqueName: \"kubernetes.io/projected/fd93e625-16d6-49a2-b8cc-08381a234ac6-kube-api-access-rc5jt\") on node \"crc\" DevicePath \"\"" Oct 03 14:45:03 crc kubenswrapper[4861]: I1003 14:45:03.539575 4861 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/fd93e625-16d6-49a2-b8cc-08381a234ac6-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 03 14:45:03 crc kubenswrapper[4861]: I1003 14:45:03.758561 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29325045-r5b8j" event={"ID":"fd93e625-16d6-49a2-b8cc-08381a234ac6","Type":"ContainerDied","Data":"42608b8a4d23cd61bf929e6fa52b90cbbf1bc2f3a37a36c0502e1461b670f081"} Oct 03 14:45:03 crc kubenswrapper[4861]: I1003 14:45:03.758931 4861 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="42608b8a4d23cd61bf929e6fa52b90cbbf1bc2f3a37a36c0502e1461b670f081" Oct 03 14:45:03 crc kubenswrapper[4861]: I1003 14:45:03.758943 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29325045-r5b8j" Oct 03 14:45:04 crc kubenswrapper[4861]: I1003 14:45:04.319695 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29325000-mqdpc"] Oct 03 14:45:04 crc kubenswrapper[4861]: I1003 14:45:04.326513 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29325000-mqdpc"] Oct 03 14:45:04 crc kubenswrapper[4861]: I1003 14:45:04.681785 4861 scope.go:117] "RemoveContainer" containerID="9cc426a24e7af0ab4a5086c70c2d2103d9a0bf02f0906c97e1bde93e1a0d2c12" Oct 03 14:45:04 crc kubenswrapper[4861]: E1003 14:45:04.682080 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 14:45:04 crc kubenswrapper[4861]: I1003 14:45:04.717998 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="be4ceb81-4a5e-4c11-995d-9e224ff2acb7" path="/var/lib/kubelet/pods/be4ceb81-4a5e-4c11-995d-9e224ff2acb7/volumes" Oct 03 14:45:17 crc kubenswrapper[4861]: I1003 14:45:17.681577 4861 scope.go:117] "RemoveContainer" containerID="9cc426a24e7af0ab4a5086c70c2d2103d9a0bf02f0906c97e1bde93e1a0d2c12" Oct 03 14:45:17 crc kubenswrapper[4861]: E1003 14:45:17.682436 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 14:45:30 crc kubenswrapper[4861]: I1003 14:45:30.681406 4861 scope.go:117] "RemoveContainer" containerID="9cc426a24e7af0ab4a5086c70c2d2103d9a0bf02f0906c97e1bde93e1a0d2c12" Oct 03 14:45:30 
crc kubenswrapper[4861]: E1003 14:45:30.682187 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 14:45:45 crc kubenswrapper[4861]: I1003 14:45:45.681999 4861 scope.go:117] "RemoveContainer" containerID="9cc426a24e7af0ab4a5086c70c2d2103d9a0bf02f0906c97e1bde93e1a0d2c12" Oct 03 14:45:45 crc kubenswrapper[4861]: E1003 14:45:45.683106 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 14:45:59 crc kubenswrapper[4861]: I1003 14:45:59.681313 4861 scope.go:117] "RemoveContainer" containerID="9cc426a24e7af0ab4a5086c70c2d2103d9a0bf02f0906c97e1bde93e1a0d2c12" Oct 03 14:45:59 crc kubenswrapper[4861]: E1003 14:45:59.682037 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 14:45:59 crc kubenswrapper[4861]: I1003 14:45:59.966807 4861 scope.go:117] "RemoveContainer" containerID="18c220d513d2679ed116fc1a5b5f1f5f69d9c989bbd763a8fd3a2e5e546d0ab5" Oct 03 14:46:13 crc kubenswrapper[4861]: I1003 14:46:13.681808 4861 scope.go:117] "RemoveContainer" containerID="9cc426a24e7af0ab4a5086c70c2d2103d9a0bf02f0906c97e1bde93e1a0d2c12" Oct 03 14:46:13 crc kubenswrapper[4861]: E1003 14:46:13.682644 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 14:46:24 crc kubenswrapper[4861]: I1003 14:46:24.085905 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-hzt9v"] Oct 03 14:46:24 crc kubenswrapper[4861]: E1003 14:46:24.086761 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd93e625-16d6-49a2-b8cc-08381a234ac6" containerName="collect-profiles" Oct 03 14:46:24 crc kubenswrapper[4861]: I1003 14:46:24.086773 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd93e625-16d6-49a2-b8cc-08381a234ac6" containerName="collect-profiles" Oct 03 14:46:24 crc kubenswrapper[4861]: I1003 14:46:24.086957 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="fd93e625-16d6-49a2-b8cc-08381a234ac6" containerName="collect-profiles" Oct 03 14:46:24 crc kubenswrapper[4861]: I1003 14:46:24.088268 4861 util.go:30] "No sandbox for pod can be 
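[annotation] The repeating RemoveContainer / "Error syncing pod" pairs above show a container pinned at the kubelet's maximum crash-loop backoff: each sync attempt is skipped with "back-off 5m0s restarting failed container". A small Go sketch of the backoff schedule itself; the 10s initial delay and doubling rule are kubelet defaults assumed here, not visible in this log:

    package main

    import (
        "fmt"
        "time"
    )

    func main() {
        // Kubelet-style crash-loop backoff: start at 10s, double after each
        // failed restart, cap at 5 minutes (the "back-off 5m0s" in the log).
        const max = 5 * time.Minute
        delay := 10 * time.Second
        for i := 1; i <= 8; i++ {
            fmt.Printf("restart %d: wait %v\n", i, delay)
            delay *= 2
            if delay > max {
                delay = max
            }
        }
    }

Once the cap is reached, the error line repeats on every sync until the backoff window expires, which is why the RemoveContainer attempts above recur every ~13s while the quoted backoff stays at 5m0s.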
found. Need to start a new one" pod="openshift-marketplace/certified-operators-hzt9v" Oct 03 14:46:24 crc kubenswrapper[4861]: I1003 14:46:24.110431 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-hzt9v"] Oct 03 14:46:24 crc kubenswrapper[4861]: I1003 14:46:24.189165 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ab07d19a-0654-4d1c-a7f7-8cc0fd010549-utilities\") pod \"certified-operators-hzt9v\" (UID: \"ab07d19a-0654-4d1c-a7f7-8cc0fd010549\") " pod="openshift-marketplace/certified-operators-hzt9v" Oct 03 14:46:24 crc kubenswrapper[4861]: I1003 14:46:24.189291 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ab07d19a-0654-4d1c-a7f7-8cc0fd010549-catalog-content\") pod \"certified-operators-hzt9v\" (UID: \"ab07d19a-0654-4d1c-a7f7-8cc0fd010549\") " pod="openshift-marketplace/certified-operators-hzt9v" Oct 03 14:46:24 crc kubenswrapper[4861]: I1003 14:46:24.189555 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q6f54\" (UniqueName: \"kubernetes.io/projected/ab07d19a-0654-4d1c-a7f7-8cc0fd010549-kube-api-access-q6f54\") pod \"certified-operators-hzt9v\" (UID: \"ab07d19a-0654-4d1c-a7f7-8cc0fd010549\") " pod="openshift-marketplace/certified-operators-hzt9v" Oct 03 14:46:24 crc kubenswrapper[4861]: I1003 14:46:24.291624 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q6f54\" (UniqueName: \"kubernetes.io/projected/ab07d19a-0654-4d1c-a7f7-8cc0fd010549-kube-api-access-q6f54\") pod \"certified-operators-hzt9v\" (UID: \"ab07d19a-0654-4d1c-a7f7-8cc0fd010549\") " pod="openshift-marketplace/certified-operators-hzt9v" Oct 03 14:46:24 crc kubenswrapper[4861]: I1003 14:46:24.291699 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ab07d19a-0654-4d1c-a7f7-8cc0fd010549-utilities\") pod \"certified-operators-hzt9v\" (UID: \"ab07d19a-0654-4d1c-a7f7-8cc0fd010549\") " pod="openshift-marketplace/certified-operators-hzt9v" Oct 03 14:46:24 crc kubenswrapper[4861]: I1003 14:46:24.291725 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ab07d19a-0654-4d1c-a7f7-8cc0fd010549-catalog-content\") pod \"certified-operators-hzt9v\" (UID: \"ab07d19a-0654-4d1c-a7f7-8cc0fd010549\") " pod="openshift-marketplace/certified-operators-hzt9v" Oct 03 14:46:24 crc kubenswrapper[4861]: I1003 14:46:24.292209 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ab07d19a-0654-4d1c-a7f7-8cc0fd010549-utilities\") pod \"certified-operators-hzt9v\" (UID: \"ab07d19a-0654-4d1c-a7f7-8cc0fd010549\") " pod="openshift-marketplace/certified-operators-hzt9v" Oct 03 14:46:24 crc kubenswrapper[4861]: I1003 14:46:24.292265 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ab07d19a-0654-4d1c-a7f7-8cc0fd010549-catalog-content\") pod \"certified-operators-hzt9v\" (UID: \"ab07d19a-0654-4d1c-a7f7-8cc0fd010549\") " pod="openshift-marketplace/certified-operators-hzt9v" Oct 03 14:46:24 crc kubenswrapper[4861]: I1003 14:46:24.322666 4861 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q6f54\" (UniqueName: \"kubernetes.io/projected/ab07d19a-0654-4d1c-a7f7-8cc0fd010549-kube-api-access-q6f54\") pod \"certified-operators-hzt9v\" (UID: \"ab07d19a-0654-4d1c-a7f7-8cc0fd010549\") " pod="openshift-marketplace/certified-operators-hzt9v" Oct 03 14:46:24 crc kubenswrapper[4861]: I1003 14:46:24.409283 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-hzt9v" Oct 03 14:46:25 crc kubenswrapper[4861]: I1003 14:46:25.060760 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-hzt9v"] Oct 03 14:46:25 crc kubenswrapper[4861]: I1003 14:46:25.480687 4861 generic.go:334] "Generic (PLEG): container finished" podID="ab07d19a-0654-4d1c-a7f7-8cc0fd010549" containerID="eb87ed65a0e34b4b2e5ad21e4b34aa1b7d9246b0a1b5326c215e209315c98ec8" exitCode=0 Oct 03 14:46:25 crc kubenswrapper[4861]: I1003 14:46:25.480736 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hzt9v" event={"ID":"ab07d19a-0654-4d1c-a7f7-8cc0fd010549","Type":"ContainerDied","Data":"eb87ed65a0e34b4b2e5ad21e4b34aa1b7d9246b0a1b5326c215e209315c98ec8"} Oct 03 14:46:25 crc kubenswrapper[4861]: I1003 14:46:25.481131 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hzt9v" event={"ID":"ab07d19a-0654-4d1c-a7f7-8cc0fd010549","Type":"ContainerStarted","Data":"c90996e9bbdc10959e15f308be30e439167512324d460c9740267a3147e032e0"} Oct 03 14:46:25 crc kubenswrapper[4861]: I1003 14:46:25.483581 4861 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 03 14:46:26 crc kubenswrapper[4861]: I1003 14:46:26.491052 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hzt9v" event={"ID":"ab07d19a-0654-4d1c-a7f7-8cc0fd010549","Type":"ContainerStarted","Data":"0c2c35cbae0e0467841efddb04f11cb8702008deb609b476f2533da2e2b0a46d"} Oct 03 14:46:28 crc kubenswrapper[4861]: I1003 14:46:28.682383 4861 scope.go:117] "RemoveContainer" containerID="9cc426a24e7af0ab4a5086c70c2d2103d9a0bf02f0906c97e1bde93e1a0d2c12" Oct 03 14:46:28 crc kubenswrapper[4861]: E1003 14:46:28.682793 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 14:46:30 crc kubenswrapper[4861]: I1003 14:46:30.526792 4861 generic.go:334] "Generic (PLEG): container finished" podID="ab07d19a-0654-4d1c-a7f7-8cc0fd010549" containerID="0c2c35cbae0e0467841efddb04f11cb8702008deb609b476f2533da2e2b0a46d" exitCode=0 Oct 03 14:46:30 crc kubenswrapper[4861]: I1003 14:46:30.526866 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hzt9v" event={"ID":"ab07d19a-0654-4d1c-a7f7-8cc0fd010549","Type":"ContainerDied","Data":"0c2c35cbae0e0467841efddb04f11cb8702008deb609b476f2533da2e2b0a46d"} Oct 03 14:46:31 crc kubenswrapper[4861]: I1003 14:46:31.541178 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hzt9v" 
event={"ID":"ab07d19a-0654-4d1c-a7f7-8cc0fd010549","Type":"ContainerStarted","Data":"56876cc72568e25739aa79721142746f0244d71cc216a2be8671638f2f0b7612"} Oct 03 14:46:31 crc kubenswrapper[4861]: I1003 14:46:31.570463 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-hzt9v" podStartSLOduration=2.125200298 podStartE2EDuration="7.570436906s" podCreationTimestamp="2025-10-03 14:46:24 +0000 UTC" firstStartedPulling="2025-10-03 14:46:25.483326485 +0000 UTC m=+4499.481311532" lastFinishedPulling="2025-10-03 14:46:30.928563093 +0000 UTC m=+4504.926548140" observedRunningTime="2025-10-03 14:46:31.564721526 +0000 UTC m=+4505.562706583" watchObservedRunningTime="2025-10-03 14:46:31.570436906 +0000 UTC m=+4505.568421953" Oct 03 14:46:34 crc kubenswrapper[4861]: I1003 14:46:34.410279 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-hzt9v" Oct 03 14:46:34 crc kubenswrapper[4861]: I1003 14:46:34.410613 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-hzt9v" Oct 03 14:46:34 crc kubenswrapper[4861]: I1003 14:46:34.490997 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-hzt9v" Oct 03 14:46:39 crc kubenswrapper[4861]: I1003 14:46:39.680697 4861 scope.go:117] "RemoveContainer" containerID="9cc426a24e7af0ab4a5086c70c2d2103d9a0bf02f0906c97e1bde93e1a0d2c12" Oct 03 14:46:39 crc kubenswrapper[4861]: E1003 14:46:39.681608 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 14:46:45 crc kubenswrapper[4861]: I1003 14:46:45.033997 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-hzt9v" Oct 03 14:46:45 crc kubenswrapper[4861]: I1003 14:46:45.087497 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-hzt9v"] Oct 03 14:46:45 crc kubenswrapper[4861]: I1003 14:46:45.681573 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-hzt9v" podUID="ab07d19a-0654-4d1c-a7f7-8cc0fd010549" containerName="registry-server" containerID="cri-o://56876cc72568e25739aa79721142746f0244d71cc216a2be8671638f2f0b7612" gracePeriod=2 Oct 03 14:46:46 crc kubenswrapper[4861]: I1003 14:46:46.183013 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-hzt9v" Oct 03 14:46:46 crc kubenswrapper[4861]: I1003 14:46:46.231318 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ab07d19a-0654-4d1c-a7f7-8cc0fd010549-utilities" (OuterVolumeSpecName: "utilities") pod "ab07d19a-0654-4d1c-a7f7-8cc0fd010549" (UID: "ab07d19a-0654-4d1c-a7f7-8cc0fd010549"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 14:46:46 crc kubenswrapper[4861]: I1003 14:46:46.231407 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ab07d19a-0654-4d1c-a7f7-8cc0fd010549-utilities\") pod \"ab07d19a-0654-4d1c-a7f7-8cc0fd010549\" (UID: \"ab07d19a-0654-4d1c-a7f7-8cc0fd010549\") " Oct 03 14:46:46 crc kubenswrapper[4861]: I1003 14:46:46.231485 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ab07d19a-0654-4d1c-a7f7-8cc0fd010549-catalog-content\") pod \"ab07d19a-0654-4d1c-a7f7-8cc0fd010549\" (UID: \"ab07d19a-0654-4d1c-a7f7-8cc0fd010549\") " Oct 03 14:46:46 crc kubenswrapper[4861]: I1003 14:46:46.231549 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q6f54\" (UniqueName: \"kubernetes.io/projected/ab07d19a-0654-4d1c-a7f7-8cc0fd010549-kube-api-access-q6f54\") pod \"ab07d19a-0654-4d1c-a7f7-8cc0fd010549\" (UID: \"ab07d19a-0654-4d1c-a7f7-8cc0fd010549\") " Oct 03 14:46:46 crc kubenswrapper[4861]: I1003 14:46:46.232069 4861 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ab07d19a-0654-4d1c-a7f7-8cc0fd010549-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 14:46:46 crc kubenswrapper[4861]: I1003 14:46:46.245458 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ab07d19a-0654-4d1c-a7f7-8cc0fd010549-kube-api-access-q6f54" (OuterVolumeSpecName: "kube-api-access-q6f54") pod "ab07d19a-0654-4d1c-a7f7-8cc0fd010549" (UID: "ab07d19a-0654-4d1c-a7f7-8cc0fd010549"). InnerVolumeSpecName "kube-api-access-q6f54". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 14:46:46 crc kubenswrapper[4861]: I1003 14:46:46.289482 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ab07d19a-0654-4d1c-a7f7-8cc0fd010549-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ab07d19a-0654-4d1c-a7f7-8cc0fd010549" (UID: "ab07d19a-0654-4d1c-a7f7-8cc0fd010549"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 14:46:46 crc kubenswrapper[4861]: I1003 14:46:46.334088 4861 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ab07d19a-0654-4d1c-a7f7-8cc0fd010549-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 14:46:46 crc kubenswrapper[4861]: I1003 14:46:46.334123 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q6f54\" (UniqueName: \"kubernetes.io/projected/ab07d19a-0654-4d1c-a7f7-8cc0fd010549-kube-api-access-q6f54\") on node \"crc\" DevicePath \"\"" Oct 03 14:46:46 crc kubenswrapper[4861]: I1003 14:46:46.693686 4861 generic.go:334] "Generic (PLEG): container finished" podID="ab07d19a-0654-4d1c-a7f7-8cc0fd010549" containerID="56876cc72568e25739aa79721142746f0244d71cc216a2be8671638f2f0b7612" exitCode=0 Oct 03 14:46:46 crc kubenswrapper[4861]: I1003 14:46:46.693791 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-hzt9v" Oct 03 14:46:46 crc kubenswrapper[4861]: I1003 14:46:46.694324 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hzt9v" event={"ID":"ab07d19a-0654-4d1c-a7f7-8cc0fd010549","Type":"ContainerDied","Data":"56876cc72568e25739aa79721142746f0244d71cc216a2be8671638f2f0b7612"} Oct 03 14:46:46 crc kubenswrapper[4861]: I1003 14:46:46.694367 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hzt9v" event={"ID":"ab07d19a-0654-4d1c-a7f7-8cc0fd010549","Type":"ContainerDied","Data":"c90996e9bbdc10959e15f308be30e439167512324d460c9740267a3147e032e0"} Oct 03 14:46:46 crc kubenswrapper[4861]: I1003 14:46:46.694390 4861 scope.go:117] "RemoveContainer" containerID="56876cc72568e25739aa79721142746f0244d71cc216a2be8671638f2f0b7612" Oct 03 14:46:46 crc kubenswrapper[4861]: I1003 14:46:46.725482 4861 scope.go:117] "RemoveContainer" containerID="0c2c35cbae0e0467841efddb04f11cb8702008deb609b476f2533da2e2b0a46d" Oct 03 14:46:46 crc kubenswrapper[4861]: I1003 14:46:46.730567 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-hzt9v"] Oct 03 14:46:46 crc kubenswrapper[4861]: I1003 14:46:46.741339 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-hzt9v"] Oct 03 14:46:46 crc kubenswrapper[4861]: I1003 14:46:46.749140 4861 scope.go:117] "RemoveContainer" containerID="eb87ed65a0e34b4b2e5ad21e4b34aa1b7d9246b0a1b5326c215e209315c98ec8" Oct 03 14:46:46 crc kubenswrapper[4861]: I1003 14:46:46.798597 4861 scope.go:117] "RemoveContainer" containerID="56876cc72568e25739aa79721142746f0244d71cc216a2be8671638f2f0b7612" Oct 03 14:46:46 crc kubenswrapper[4861]: E1003 14:46:46.799065 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"56876cc72568e25739aa79721142746f0244d71cc216a2be8671638f2f0b7612\": container with ID starting with 56876cc72568e25739aa79721142746f0244d71cc216a2be8671638f2f0b7612 not found: ID does not exist" containerID="56876cc72568e25739aa79721142746f0244d71cc216a2be8671638f2f0b7612" Oct 03 14:46:46 crc kubenswrapper[4861]: I1003 14:46:46.799118 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"56876cc72568e25739aa79721142746f0244d71cc216a2be8671638f2f0b7612"} err="failed to get container status \"56876cc72568e25739aa79721142746f0244d71cc216a2be8671638f2f0b7612\": rpc error: code = NotFound desc = could not find container \"56876cc72568e25739aa79721142746f0244d71cc216a2be8671638f2f0b7612\": container with ID starting with 56876cc72568e25739aa79721142746f0244d71cc216a2be8671638f2f0b7612 not found: ID does not exist" Oct 03 14:46:46 crc kubenswrapper[4861]: I1003 14:46:46.799148 4861 scope.go:117] "RemoveContainer" containerID="0c2c35cbae0e0467841efddb04f11cb8702008deb609b476f2533da2e2b0a46d" Oct 03 14:46:46 crc kubenswrapper[4861]: E1003 14:46:46.799500 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0c2c35cbae0e0467841efddb04f11cb8702008deb609b476f2533da2e2b0a46d\": container with ID starting with 0c2c35cbae0e0467841efddb04f11cb8702008deb609b476f2533da2e2b0a46d not found: ID does not exist" containerID="0c2c35cbae0e0467841efddb04f11cb8702008deb609b476f2533da2e2b0a46d" Oct 03 14:46:46 crc kubenswrapper[4861]: I1003 14:46:46.799550 4861 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0c2c35cbae0e0467841efddb04f11cb8702008deb609b476f2533da2e2b0a46d"} err="failed to get container status \"0c2c35cbae0e0467841efddb04f11cb8702008deb609b476f2533da2e2b0a46d\": rpc error: code = NotFound desc = could not find container \"0c2c35cbae0e0467841efddb04f11cb8702008deb609b476f2533da2e2b0a46d\": container with ID starting with 0c2c35cbae0e0467841efddb04f11cb8702008deb609b476f2533da2e2b0a46d not found: ID does not exist" Oct 03 14:46:46 crc kubenswrapper[4861]: I1003 14:46:46.799568 4861 scope.go:117] "RemoveContainer" containerID="eb87ed65a0e34b4b2e5ad21e4b34aa1b7d9246b0a1b5326c215e209315c98ec8" Oct 03 14:46:46 crc kubenswrapper[4861]: E1003 14:46:46.799945 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eb87ed65a0e34b4b2e5ad21e4b34aa1b7d9246b0a1b5326c215e209315c98ec8\": container with ID starting with eb87ed65a0e34b4b2e5ad21e4b34aa1b7d9246b0a1b5326c215e209315c98ec8 not found: ID does not exist" containerID="eb87ed65a0e34b4b2e5ad21e4b34aa1b7d9246b0a1b5326c215e209315c98ec8" Oct 03 14:46:46 crc kubenswrapper[4861]: I1003 14:46:46.799974 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eb87ed65a0e34b4b2e5ad21e4b34aa1b7d9246b0a1b5326c215e209315c98ec8"} err="failed to get container status \"eb87ed65a0e34b4b2e5ad21e4b34aa1b7d9246b0a1b5326c215e209315c98ec8\": rpc error: code = NotFound desc = could not find container \"eb87ed65a0e34b4b2e5ad21e4b34aa1b7d9246b0a1b5326c215e209315c98ec8\": container with ID starting with eb87ed65a0e34b4b2e5ad21e4b34aa1b7d9246b0a1b5326c215e209315c98ec8 not found: ID does not exist" Oct 03 14:46:48 crc kubenswrapper[4861]: I1003 14:46:48.691747 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ab07d19a-0654-4d1c-a7f7-8cc0fd010549" path="/var/lib/kubelet/pods/ab07d19a-0654-4d1c-a7f7-8cc0fd010549/volumes" Oct 03 14:46:54 crc kubenswrapper[4861]: I1003 14:46:54.681862 4861 scope.go:117] "RemoveContainer" containerID="9cc426a24e7af0ab4a5086c70c2d2103d9a0bf02f0906c97e1bde93e1a0d2c12" Oct 03 14:46:54 crc kubenswrapper[4861]: E1003 14:46:54.682752 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 14:47:06 crc kubenswrapper[4861]: I1003 14:47:06.690885 4861 scope.go:117] "RemoveContainer" containerID="9cc426a24e7af0ab4a5086c70c2d2103d9a0bf02f0906c97e1bde93e1a0d2c12" Oct 03 14:47:06 crc kubenswrapper[4861]: E1003 14:47:06.691589 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 14:47:21 crc kubenswrapper[4861]: I1003 14:47:21.681371 4861 scope.go:117] "RemoveContainer" containerID="9cc426a24e7af0ab4a5086c70c2d2103d9a0bf02f0906c97e1bde93e1a0d2c12" Oct 03 14:47:21 crc 
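[annotation] The "ContainerStatus from runtime service failed ... NotFound" errors just above are the benign tail of pod deletion: the kubelet re-issues RemoveContainer for IDs the runtime has already purged and treats gRPC NotFound as "already done". A sketch of that idempotency check using the standard gRPC status package; the runtime call is stubbed with a hypothetical helper, not the CRI client itself:

    package main

    import (
        "fmt"

        "google.golang.org/grpc/codes"
        "google.golang.org/grpc/status"
    )

    // removeContainer stands in for the CRI RemoveContainer RPC; here it
    // always reports the container as missing, like the log's second attempt.
    func removeContainer(id string) error {
        return status.Errorf(codes.NotFound, "could not find container %q", id)
    }

    func main() {
        err := removeContainer("deadbeef") // hypothetical container ID
        if status.Code(err) == codes.NotFound {
            fmt.Println("already gone, nothing to do") // idempotent cleanup
        } else if err != nil {
            panic(err)
        }
    }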
Oct 03 14:47:32 crc kubenswrapper[4861]: I1003 14:47:32.681605 4861 scope.go:117] "RemoveContainer" containerID="9cc426a24e7af0ab4a5086c70c2d2103d9a0bf02f0906c97e1bde93e1a0d2c12"
Oct 03 14:47:34 crc kubenswrapper[4861]: I1003 14:47:34.103771 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" event={"ID":"d8335d3f-417e-4114-b306-a3d8f6c31348","Type":"ContainerStarted","Data":"c9fca1af93d570056d03200b887e5822727fbbd0c431d91df4b8a8541a504191"}
Oct 03 14:49:54 crc kubenswrapper[4861]: I1003 14:49:54.990981 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-7mgcb"]
Oct 03 14:49:54 crc kubenswrapper[4861]: E1003 14:49:54.992189 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab07d19a-0654-4d1c-a7f7-8cc0fd010549" containerName="extract-content"
Oct 03 14:49:54 crc kubenswrapper[4861]: I1003 14:49:54.992206 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab07d19a-0654-4d1c-a7f7-8cc0fd010549" containerName="extract-content"
Oct 03 14:49:54 crc kubenswrapper[4861]: E1003 14:49:54.992275 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab07d19a-0654-4d1c-a7f7-8cc0fd010549" containerName="extract-utilities"
Oct 03 14:49:54 crc kubenswrapper[4861]: I1003 14:49:54.992285 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab07d19a-0654-4d1c-a7f7-8cc0fd010549" containerName="extract-utilities"
Oct 03 14:49:54 crc kubenswrapper[4861]: E1003 14:49:54.992306 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab07d19a-0654-4d1c-a7f7-8cc0fd010549" containerName="registry-server"
Oct 03 14:49:54 crc kubenswrapper[4861]: I1003 14:49:54.992315 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab07d19a-0654-4d1c-a7f7-8cc0fd010549" containerName="registry-server"
Oct 03 14:49:54 crc kubenswrapper[4861]: I1003 14:49:54.992547 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="ab07d19a-0654-4d1c-a7f7-8cc0fd010549" containerName="registry-server"
Oct 03 14:49:54 crc kubenswrapper[4861]: I1003 14:49:54.994189 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-7mgcb"
Oct 03 14:49:55 crc kubenswrapper[4861]: I1003 14:49:55.021770 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-7mgcb"]
Oct 03 14:49:55 crc kubenswrapper[4861]: I1003 14:49:55.118185 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9c36e515-813e-4417-aee8-3ac17e285eb7-utilities\") pod \"community-operators-7mgcb\" (UID: \"9c36e515-813e-4417-aee8-3ac17e285eb7\") " pod="openshift-marketplace/community-operators-7mgcb"
Oct 03 14:49:55 crc kubenswrapper[4861]: I1003 14:49:55.118268 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9c36e515-813e-4417-aee8-3ac17e285eb7-catalog-content\") pod \"community-operators-7mgcb\" (UID: \"9c36e515-813e-4417-aee8-3ac17e285eb7\") " pod="openshift-marketplace/community-operators-7mgcb"
Oct 03 14:49:55 crc kubenswrapper[4861]: I1003 14:49:55.118320 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5pjq2\" (UniqueName: \"kubernetes.io/projected/9c36e515-813e-4417-aee8-3ac17e285eb7-kube-api-access-5pjq2\") pod \"community-operators-7mgcb\" (UID: \"9c36e515-813e-4417-aee8-3ac17e285eb7\") " pod="openshift-marketplace/community-operators-7mgcb"
Oct 03 14:49:55 crc kubenswrapper[4861]: I1003 14:49:55.221325 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9c36e515-813e-4417-aee8-3ac17e285eb7-utilities\") pod \"community-operators-7mgcb\" (UID: \"9c36e515-813e-4417-aee8-3ac17e285eb7\") " pod="openshift-marketplace/community-operators-7mgcb"
Oct 03 14:49:55 crc kubenswrapper[4861]: I1003 14:49:55.221698 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9c36e515-813e-4417-aee8-3ac17e285eb7-catalog-content\") pod \"community-operators-7mgcb\" (UID: \"9c36e515-813e-4417-aee8-3ac17e285eb7\") " pod="openshift-marketplace/community-operators-7mgcb"
Oct 03 14:49:55 crc kubenswrapper[4861]: I1003 14:49:55.221774 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5pjq2\" (UniqueName: \"kubernetes.io/projected/9c36e515-813e-4417-aee8-3ac17e285eb7-kube-api-access-5pjq2\") pod \"community-operators-7mgcb\" (UID: \"9c36e515-813e-4417-aee8-3ac17e285eb7\") " pod="openshift-marketplace/community-operators-7mgcb"
Oct 03 14:49:55 crc kubenswrapper[4861]: I1003 14:49:55.221908 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9c36e515-813e-4417-aee8-3ac17e285eb7-utilities\") pod \"community-operators-7mgcb\" (UID: \"9c36e515-813e-4417-aee8-3ac17e285eb7\") " pod="openshift-marketplace/community-operators-7mgcb"
Oct 03 14:49:55 crc kubenswrapper[4861]: I1003 14:49:55.222120 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9c36e515-813e-4417-aee8-3ac17e285eb7-catalog-content\") pod \"community-operators-7mgcb\" (UID: \"9c36e515-813e-4417-aee8-3ac17e285eb7\") " pod="openshift-marketplace/community-operators-7mgcb"
Oct 03 14:49:55 crc kubenswrapper[4861]: I1003 14:49:55.245744 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5pjq2\" (UniqueName: \"kubernetes.io/projected/9c36e515-813e-4417-aee8-3ac17e285eb7-kube-api-access-5pjq2\") pod \"community-operators-7mgcb\" (UID: \"9c36e515-813e-4417-aee8-3ac17e285eb7\") " pod="openshift-marketplace/community-operators-7mgcb"
Oct 03 14:49:55 crc kubenswrapper[4861]: I1003 14:49:55.316377 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-7mgcb"
Oct 03 14:49:55 crc kubenswrapper[4861]: I1003 14:49:55.990305 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-7mgcb"]
Oct 03 14:49:56 crc kubenswrapper[4861]: I1003 14:49:56.416850 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7mgcb" event={"ID":"9c36e515-813e-4417-aee8-3ac17e285eb7","Type":"ContainerStarted","Data":"1016b4bef08f44b829cef8f143ed52aeddbcbdc41888d0eb6b08690bdee3d618"}
Oct 03 14:49:57 crc kubenswrapper[4861]: I1003 14:49:57.427309 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7mgcb" event={"ID":"9c36e515-813e-4417-aee8-3ac17e285eb7","Type":"ContainerDied","Data":"2aca90cc8baf7277d18c7f5d701bee0450cbbc5fd36bdf8e757175b692c8cc2e"}
Oct 03 14:49:57 crc kubenswrapper[4861]: I1003 14:49:57.427170 4861 generic.go:334] "Generic (PLEG): container finished" podID="9c36e515-813e-4417-aee8-3ac17e285eb7" containerID="2aca90cc8baf7277d18c7f5d701bee0450cbbc5fd36bdf8e757175b692c8cc2e" exitCode=0
Oct 03 14:49:59 crc kubenswrapper[4861]: I1003 14:49:59.448350 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7mgcb" event={"ID":"9c36e515-813e-4417-aee8-3ac17e285eb7","Type":"ContainerStarted","Data":"cc64699314f0ab23a96da6fddb02b68114952b04b4bc2301d46df79f685bd4e5"}
Oct 03 14:50:00 crc kubenswrapper[4861]: I1003 14:50:00.145099 4861 patch_prober.go:28] interesting pod/machine-config-daemon-t9slw container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 03 14:50:00 crc kubenswrapper[4861]: I1003 14:50:00.145442 4861 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 03 14:50:00 crc kubenswrapper[4861]: I1003 14:50:00.459018 4861 generic.go:334] "Generic (PLEG): container finished" podID="9c36e515-813e-4417-aee8-3ac17e285eb7" containerID="cc64699314f0ab23a96da6fddb02b68114952b04b4bc2301d46df79f685bd4e5" exitCode=0
Oct 03 14:50:00 crc kubenswrapper[4861]: I1003 14:50:00.459068 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7mgcb" event={"ID":"9c36e515-813e-4417-aee8-3ac17e285eb7","Type":"ContainerDied","Data":"cc64699314f0ab23a96da6fddb02b68114952b04b4bc2301d46df79f685bd4e5"}
Oct 03 14:50:01 crc kubenswrapper[4861]: I1003 14:50:01.471278 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7mgcb" event={"ID":"9c36e515-813e-4417-aee8-3ac17e285eb7","Type":"ContainerStarted","Data":"60c9bb048bf29db5a22c7a1a72c8cf86c9588877afaad028e405265916932319"}
Oct 03 14:50:01 crc kubenswrapper[4861]: I1003 14:50:01.495787 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-7mgcb" podStartSLOduration=4.025727638 podStartE2EDuration="7.495765088s" podCreationTimestamp="2025-10-03 14:49:54 +0000 UTC" firstStartedPulling="2025-10-03 14:49:57.431529417 +0000 UTC m=+4711.429514464" lastFinishedPulling="2025-10-03 14:50:00.901566857 +0000 UTC m=+4714.899551914" observedRunningTime="2025-10-03 14:50:01.487520498 +0000 UTC m=+4715.485505545" watchObservedRunningTime="2025-10-03 14:50:01.495765088 +0000 UTC m=+4715.493750135"
Oct 03 14:50:05 crc kubenswrapper[4861]: I1003 14:50:05.316778 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-7mgcb"
Oct 03 14:50:05 crc kubenswrapper[4861]: I1003 14:50:05.319329 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-7mgcb"
Oct 03 14:50:06 crc kubenswrapper[4861]: I1003 14:50:06.380323 4861 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-7mgcb" podUID="9c36e515-813e-4417-aee8-3ac17e285eb7" containerName="registry-server" probeResult="failure" output=<
Oct 03 14:50:06 crc kubenswrapper[4861]: 	timeout: failed to connect service ":50051" within 1s
Oct 03 14:50:06 crc kubenswrapper[4861]: >
Oct 03 14:50:15 crc kubenswrapper[4861]: I1003 14:50:15.364435 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-7mgcb"
Oct 03 14:50:15 crc kubenswrapper[4861]: I1003 14:50:15.415537 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-7mgcb"
Oct 03 14:50:15 crc kubenswrapper[4861]: I1003 14:50:15.600692 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-7mgcb"]
Oct 03 14:50:16 crc kubenswrapper[4861]: I1003 14:50:16.613499 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-7mgcb" podUID="9c36e515-813e-4417-aee8-3ac17e285eb7" containerName="registry-server" containerID="cri-o://60c9bb048bf29db5a22c7a1a72c8cf86c9588877afaad028e405265916932319" gracePeriod=2
Oct 03 14:50:17 crc kubenswrapper[4861]: I1003 14:50:17.300194 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-7mgcb"
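[annotation] The Startup probe failure above ("timeout: failed to connect service \":50051\" within 1s") is a gRPC health check against the registry-server that cannot answer while the catalog is still loading. A minimal client along the lines of grpc_health_probe; the address and 1s timeout mirror the log, but this is an illustrative stand-in, not the probe binary itself:

    package main

    import (
        "context"
        "fmt"
        "time"

        "google.golang.org/grpc"
        "google.golang.org/grpc/credentials/insecure"
        healthpb "google.golang.org/grpc/health/grpc_health_v1"
    )

    func main() {
        ctx, cancel := context.WithTimeout(context.Background(), time.Second)
        defer cancel()
        conn, err := grpc.DialContext(ctx, "localhost:50051",
            grpc.WithTransportCredentials(insecure.NewCredentials()), grpc.WithBlock())
        if err != nil {
            fmt.Println("probe failed:", err) // e.g. context deadline exceeded
            return
        }
        defer conn.Close()
        resp, err := healthpb.NewHealthClient(conn).Check(ctx, &healthpb.HealthCheckRequest{})
        if err != nil {
            fmt.Println("probe failed:", err)
            return
        }
        fmt.Println("status:", resp.GetStatus()) // SERVING once the catalog is up
    }

The probe flipping to status="started" at 14:50:15 marks the point where this check first succeeded.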
Need to start a new one" pod="openshift-marketplace/community-operators-7mgcb" Oct 03 14:50:17 crc kubenswrapper[4861]: I1003 14:50:17.449854 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9c36e515-813e-4417-aee8-3ac17e285eb7-catalog-content\") pod \"9c36e515-813e-4417-aee8-3ac17e285eb7\" (UID: \"9c36e515-813e-4417-aee8-3ac17e285eb7\") " Oct 03 14:50:17 crc kubenswrapper[4861]: I1003 14:50:17.449957 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5pjq2\" (UniqueName: \"kubernetes.io/projected/9c36e515-813e-4417-aee8-3ac17e285eb7-kube-api-access-5pjq2\") pod \"9c36e515-813e-4417-aee8-3ac17e285eb7\" (UID: \"9c36e515-813e-4417-aee8-3ac17e285eb7\") " Oct 03 14:50:17 crc kubenswrapper[4861]: I1003 14:50:17.450070 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9c36e515-813e-4417-aee8-3ac17e285eb7-utilities\") pod \"9c36e515-813e-4417-aee8-3ac17e285eb7\" (UID: \"9c36e515-813e-4417-aee8-3ac17e285eb7\") " Oct 03 14:50:17 crc kubenswrapper[4861]: I1003 14:50:17.451039 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9c36e515-813e-4417-aee8-3ac17e285eb7-utilities" (OuterVolumeSpecName: "utilities") pod "9c36e515-813e-4417-aee8-3ac17e285eb7" (UID: "9c36e515-813e-4417-aee8-3ac17e285eb7"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 14:50:17 crc kubenswrapper[4861]: I1003 14:50:17.455692 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9c36e515-813e-4417-aee8-3ac17e285eb7-kube-api-access-5pjq2" (OuterVolumeSpecName: "kube-api-access-5pjq2") pod "9c36e515-813e-4417-aee8-3ac17e285eb7" (UID: "9c36e515-813e-4417-aee8-3ac17e285eb7"). InnerVolumeSpecName "kube-api-access-5pjq2". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 14:50:17 crc kubenswrapper[4861]: I1003 14:50:17.514188 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9c36e515-813e-4417-aee8-3ac17e285eb7-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9c36e515-813e-4417-aee8-3ac17e285eb7" (UID: "9c36e515-813e-4417-aee8-3ac17e285eb7"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 14:50:17 crc kubenswrapper[4861]: I1003 14:50:17.552751 4861 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9c36e515-813e-4417-aee8-3ac17e285eb7-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 14:50:17 crc kubenswrapper[4861]: I1003 14:50:17.552793 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5pjq2\" (UniqueName: \"kubernetes.io/projected/9c36e515-813e-4417-aee8-3ac17e285eb7-kube-api-access-5pjq2\") on node \"crc\" DevicePath \"\"" Oct 03 14:50:17 crc kubenswrapper[4861]: I1003 14:50:17.552813 4861 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9c36e515-813e-4417-aee8-3ac17e285eb7-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 14:50:17 crc kubenswrapper[4861]: I1003 14:50:17.625645 4861 generic.go:334] "Generic (PLEG): container finished" podID="9c36e515-813e-4417-aee8-3ac17e285eb7" containerID="60c9bb048bf29db5a22c7a1a72c8cf86c9588877afaad028e405265916932319" exitCode=0 Oct 03 14:50:17 crc kubenswrapper[4861]: I1003 14:50:17.625688 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7mgcb" event={"ID":"9c36e515-813e-4417-aee8-3ac17e285eb7","Type":"ContainerDied","Data":"60c9bb048bf29db5a22c7a1a72c8cf86c9588877afaad028e405265916932319"} Oct 03 14:50:17 crc kubenswrapper[4861]: I1003 14:50:17.625715 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7mgcb" event={"ID":"9c36e515-813e-4417-aee8-3ac17e285eb7","Type":"ContainerDied","Data":"1016b4bef08f44b829cef8f143ed52aeddbcbdc41888d0eb6b08690bdee3d618"} Oct 03 14:50:17 crc kubenswrapper[4861]: I1003 14:50:17.625732 4861 scope.go:117] "RemoveContainer" containerID="60c9bb048bf29db5a22c7a1a72c8cf86c9588877afaad028e405265916932319" Oct 03 14:50:17 crc kubenswrapper[4861]: I1003 14:50:17.627321 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-7mgcb" Oct 03 14:50:17 crc kubenswrapper[4861]: I1003 14:50:17.664282 4861 scope.go:117] "RemoveContainer" containerID="cc64699314f0ab23a96da6fddb02b68114952b04b4bc2301d46df79f685bd4e5" Oct 03 14:50:17 crc kubenswrapper[4861]: I1003 14:50:17.691434 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-7mgcb"] Oct 03 14:50:17 crc kubenswrapper[4861]: I1003 14:50:17.699571 4861 scope.go:117] "RemoveContainer" containerID="2aca90cc8baf7277d18c7f5d701bee0450cbbc5fd36bdf8e757175b692c8cc2e" Oct 03 14:50:17 crc kubenswrapper[4861]: I1003 14:50:17.707964 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-7mgcb"] Oct 03 14:50:17 crc kubenswrapper[4861]: I1003 14:50:17.732700 4861 scope.go:117] "RemoveContainer" containerID="60c9bb048bf29db5a22c7a1a72c8cf86c9588877afaad028e405265916932319" Oct 03 14:50:17 crc kubenswrapper[4861]: E1003 14:50:17.733184 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"60c9bb048bf29db5a22c7a1a72c8cf86c9588877afaad028e405265916932319\": container with ID starting with 60c9bb048bf29db5a22c7a1a72c8cf86c9588877afaad028e405265916932319 not found: ID does not exist" containerID="60c9bb048bf29db5a22c7a1a72c8cf86c9588877afaad028e405265916932319" Oct 03 14:50:17 crc kubenswrapper[4861]: I1003 14:50:17.733333 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"60c9bb048bf29db5a22c7a1a72c8cf86c9588877afaad028e405265916932319"} err="failed to get container status \"60c9bb048bf29db5a22c7a1a72c8cf86c9588877afaad028e405265916932319\": rpc error: code = NotFound desc = could not find container \"60c9bb048bf29db5a22c7a1a72c8cf86c9588877afaad028e405265916932319\": container with ID starting with 60c9bb048bf29db5a22c7a1a72c8cf86c9588877afaad028e405265916932319 not found: ID does not exist" Oct 03 14:50:17 crc kubenswrapper[4861]: I1003 14:50:17.733430 4861 scope.go:117] "RemoveContainer" containerID="cc64699314f0ab23a96da6fddb02b68114952b04b4bc2301d46df79f685bd4e5" Oct 03 14:50:17 crc kubenswrapper[4861]: E1003 14:50:17.734113 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cc64699314f0ab23a96da6fddb02b68114952b04b4bc2301d46df79f685bd4e5\": container with ID starting with cc64699314f0ab23a96da6fddb02b68114952b04b4bc2301d46df79f685bd4e5 not found: ID does not exist" containerID="cc64699314f0ab23a96da6fddb02b68114952b04b4bc2301d46df79f685bd4e5" Oct 03 14:50:17 crc kubenswrapper[4861]: I1003 14:50:17.734246 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cc64699314f0ab23a96da6fddb02b68114952b04b4bc2301d46df79f685bd4e5"} err="failed to get container status \"cc64699314f0ab23a96da6fddb02b68114952b04b4bc2301d46df79f685bd4e5\": rpc error: code = NotFound desc = could not find container \"cc64699314f0ab23a96da6fddb02b68114952b04b4bc2301d46df79f685bd4e5\": container with ID starting with cc64699314f0ab23a96da6fddb02b68114952b04b4bc2301d46df79f685bd4e5 not found: ID does not exist" Oct 03 14:50:17 crc kubenswrapper[4861]: I1003 14:50:17.734335 4861 scope.go:117] "RemoveContainer" containerID="2aca90cc8baf7277d18c7f5d701bee0450cbbc5fd36bdf8e757175b692c8cc2e" Oct 03 14:50:17 crc kubenswrapper[4861]: E1003 14:50:17.734644 4861 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"2aca90cc8baf7277d18c7f5d701bee0450cbbc5fd36bdf8e757175b692c8cc2e\": container with ID starting with 2aca90cc8baf7277d18c7f5d701bee0450cbbc5fd36bdf8e757175b692c8cc2e not found: ID does not exist" containerID="2aca90cc8baf7277d18c7f5d701bee0450cbbc5fd36bdf8e757175b692c8cc2e" Oct 03 14:50:17 crc kubenswrapper[4861]: I1003 14:50:17.734737 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2aca90cc8baf7277d18c7f5d701bee0450cbbc5fd36bdf8e757175b692c8cc2e"} err="failed to get container status \"2aca90cc8baf7277d18c7f5d701bee0450cbbc5fd36bdf8e757175b692c8cc2e\": rpc error: code = NotFound desc = could not find container \"2aca90cc8baf7277d18c7f5d701bee0450cbbc5fd36bdf8e757175b692c8cc2e\": container with ID starting with 2aca90cc8baf7277d18c7f5d701bee0450cbbc5fd36bdf8e757175b692c8cc2e not found: ID does not exist" Oct 03 14:50:18 crc kubenswrapper[4861]: I1003 14:50:18.693991 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9c36e515-813e-4417-aee8-3ac17e285eb7" path="/var/lib/kubelet/pods/9c36e515-813e-4417-aee8-3ac17e285eb7/volumes" Oct 03 14:50:30 crc kubenswrapper[4861]: I1003 14:50:30.145437 4861 patch_prober.go:28] interesting pod/machine-config-daemon-t9slw container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 14:50:30 crc kubenswrapper[4861]: I1003 14:50:30.145920 4861 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 14:50:48 crc kubenswrapper[4861]: I1003 14:50:48.354546 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-kddlg"] Oct 03 14:50:48 crc kubenswrapper[4861]: E1003 14:50:48.355488 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c36e515-813e-4417-aee8-3ac17e285eb7" containerName="extract-content" Oct 03 14:50:48 crc kubenswrapper[4861]: I1003 14:50:48.355504 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c36e515-813e-4417-aee8-3ac17e285eb7" containerName="extract-content" Oct 03 14:50:48 crc kubenswrapper[4861]: E1003 14:50:48.355532 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c36e515-813e-4417-aee8-3ac17e285eb7" containerName="registry-server" Oct 03 14:50:48 crc kubenswrapper[4861]: I1003 14:50:48.355540 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c36e515-813e-4417-aee8-3ac17e285eb7" containerName="registry-server" Oct 03 14:50:48 crc kubenswrapper[4861]: E1003 14:50:48.355569 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c36e515-813e-4417-aee8-3ac17e285eb7" containerName="extract-utilities" Oct 03 14:50:48 crc kubenswrapper[4861]: I1003 14:50:48.355579 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c36e515-813e-4417-aee8-3ac17e285eb7" containerName="extract-utilities" Oct 03 14:50:48 crc kubenswrapper[4861]: I1003 14:50:48.355815 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="9c36e515-813e-4417-aee8-3ac17e285eb7" containerName="registry-server" Oct 03 14:50:48 crc kubenswrapper[4861]: I1003 
Oct 03 14:50:48 crc kubenswrapper[4861]: I1003 14:50:48.374330 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-kddlg"]
Oct 03 14:50:48 crc kubenswrapper[4861]: I1003 14:50:48.465384 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cl25v\" (UniqueName: \"kubernetes.io/projected/ad1218ba-ddc7-4368-bc3f-a15fc3a5b79b-kube-api-access-cl25v\") pod \"redhat-operators-kddlg\" (UID: \"ad1218ba-ddc7-4368-bc3f-a15fc3a5b79b\") " pod="openshift-marketplace/redhat-operators-kddlg"
Oct 03 14:50:48 crc kubenswrapper[4861]: I1003 14:50:48.465813 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ad1218ba-ddc7-4368-bc3f-a15fc3a5b79b-utilities\") pod \"redhat-operators-kddlg\" (UID: \"ad1218ba-ddc7-4368-bc3f-a15fc3a5b79b\") " pod="openshift-marketplace/redhat-operators-kddlg"
Oct 03 14:50:48 crc kubenswrapper[4861]: I1003 14:50:48.465969 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ad1218ba-ddc7-4368-bc3f-a15fc3a5b79b-catalog-content\") pod \"redhat-operators-kddlg\" (UID: \"ad1218ba-ddc7-4368-bc3f-a15fc3a5b79b\") " pod="openshift-marketplace/redhat-operators-kddlg"
Oct 03 14:50:48 crc kubenswrapper[4861]: I1003 14:50:48.568263 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ad1218ba-ddc7-4368-bc3f-a15fc3a5b79b-utilities\") pod \"redhat-operators-kddlg\" (UID: \"ad1218ba-ddc7-4368-bc3f-a15fc3a5b79b\") " pod="openshift-marketplace/redhat-operators-kddlg"
Oct 03 14:50:48 crc kubenswrapper[4861]: I1003 14:50:48.568529 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ad1218ba-ddc7-4368-bc3f-a15fc3a5b79b-catalog-content\") pod \"redhat-operators-kddlg\" (UID: \"ad1218ba-ddc7-4368-bc3f-a15fc3a5b79b\") " pod="openshift-marketplace/redhat-operators-kddlg"
Oct 03 14:50:48 crc kubenswrapper[4861]: I1003 14:50:48.568650 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cl25v\" (UniqueName: \"kubernetes.io/projected/ad1218ba-ddc7-4368-bc3f-a15fc3a5b79b-kube-api-access-cl25v\") pod \"redhat-operators-kddlg\" (UID: \"ad1218ba-ddc7-4368-bc3f-a15fc3a5b79b\") " pod="openshift-marketplace/redhat-operators-kddlg"
Oct 03 14:50:48 crc kubenswrapper[4861]: I1003 14:50:48.568801 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ad1218ba-ddc7-4368-bc3f-a15fc3a5b79b-utilities\") pod \"redhat-operators-kddlg\" (UID: \"ad1218ba-ddc7-4368-bc3f-a15fc3a5b79b\") " pod="openshift-marketplace/redhat-operators-kddlg"
Oct 03 14:50:48 crc kubenswrapper[4861]: I1003 14:50:48.568856 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ad1218ba-ddc7-4368-bc3f-a15fc3a5b79b-catalog-content\") pod \"redhat-operators-kddlg\" (UID: \"ad1218ba-ddc7-4368-bc3f-a15fc3a5b79b\") " pod="openshift-marketplace/redhat-operators-kddlg"
Oct 03 14:50:48 crc kubenswrapper[4861]: I1003 14:50:48.589039 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cl25v\" (UniqueName: \"kubernetes.io/projected/ad1218ba-ddc7-4368-bc3f-a15fc3a5b79b-kube-api-access-cl25v\") pod \"redhat-operators-kddlg\" (UID: \"ad1218ba-ddc7-4368-bc3f-a15fc3a5b79b\") " pod="openshift-marketplace/redhat-operators-kddlg"
Oct 03 14:50:48 crc kubenswrapper[4861]: I1003 14:50:48.693764 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-kddlg"
Oct 03 14:50:49 crc kubenswrapper[4861]: I1003 14:50:49.193989 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-kddlg"]
Oct 03 14:50:49 crc kubenswrapper[4861]: I1003 14:50:49.752891 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-z8png"]
Oct 03 14:50:49 crc kubenswrapper[4861]: I1003 14:50:49.756459 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-z8png"
Oct 03 14:50:49 crc kubenswrapper[4861]: I1003 14:50:49.762905 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-z8png"]
Oct 03 14:50:49 crc kubenswrapper[4861]: I1003 14:50:49.895421 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9fc733fa-f039-485a-ac01-ed0397e4f98f-utilities\") pod \"redhat-marketplace-z8png\" (UID: \"9fc733fa-f039-485a-ac01-ed0397e4f98f\") " pod="openshift-marketplace/redhat-marketplace-z8png"
Oct 03 14:50:49 crc kubenswrapper[4861]: I1003 14:50:49.895480 4861 generic.go:334] "Generic (PLEG): container finished" podID="ad1218ba-ddc7-4368-bc3f-a15fc3a5b79b" containerID="ad1d2f0ce55171e519e1362068b4c7ad2549af3551c70174b85f3ded4417638b" exitCode=0
Oct 03 14:50:49 crc kubenswrapper[4861]: I1003 14:50:49.895495 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fvwj9\" (UniqueName: \"kubernetes.io/projected/9fc733fa-f039-485a-ac01-ed0397e4f98f-kube-api-access-fvwj9\") pod \"redhat-marketplace-z8png\" (UID: \"9fc733fa-f039-485a-ac01-ed0397e4f98f\") " pod="openshift-marketplace/redhat-marketplace-z8png"
Oct 03 14:50:49 crc kubenswrapper[4861]: I1003 14:50:49.895598 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9fc733fa-f039-485a-ac01-ed0397e4f98f-catalog-content\") pod \"redhat-marketplace-z8png\" (UID: \"9fc733fa-f039-485a-ac01-ed0397e4f98f\") " pod="openshift-marketplace/redhat-marketplace-z8png"
Oct 03 14:50:49 crc kubenswrapper[4861]: I1003 14:50:49.895721 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kddlg" event={"ID":"ad1218ba-ddc7-4368-bc3f-a15fc3a5b79b","Type":"ContainerDied","Data":"ad1d2f0ce55171e519e1362068b4c7ad2549af3551c70174b85f3ded4417638b"}
Oct 03 14:50:49 crc kubenswrapper[4861]: I1003 14:50:49.895777 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kddlg" event={"ID":"ad1218ba-ddc7-4368-bc3f-a15fc3a5b79b","Type":"ContainerStarted","Data":"47201b8fdfbfa4716ad188e7c848b3ab66bdcbfbc103f3d93fcecd06964855b8"}
Oct 03 14:50:49 crc kubenswrapper[4861]: I1003 14:50:49.998459 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9fc733fa-f039-485a-ac01-ed0397e4f98f-utilities\") pod \"redhat-marketplace-z8png\" (UID: \"9fc733fa-f039-485a-ac01-ed0397e4f98f\") " pod="openshift-marketplace/redhat-marketplace-z8png"
Oct 03 14:50:49 crc kubenswrapper[4861]: I1003 14:50:49.998865 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fvwj9\" (UniqueName: \"kubernetes.io/projected/9fc733fa-f039-485a-ac01-ed0397e4f98f-kube-api-access-fvwj9\") pod \"redhat-marketplace-z8png\" (UID: \"9fc733fa-f039-485a-ac01-ed0397e4f98f\") " pod="openshift-marketplace/redhat-marketplace-z8png"
Oct 03 14:50:49 crc kubenswrapper[4861]: I1003 14:50:49.998902 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9fc733fa-f039-485a-ac01-ed0397e4f98f-catalog-content\") pod \"redhat-marketplace-z8png\" (UID: \"9fc733fa-f039-485a-ac01-ed0397e4f98f\") " pod="openshift-marketplace/redhat-marketplace-z8png"
Oct 03 14:50:49 crc kubenswrapper[4861]: I1003 14:50:49.999473 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9fc733fa-f039-485a-ac01-ed0397e4f98f-catalog-content\") pod \"redhat-marketplace-z8png\" (UID: \"9fc733fa-f039-485a-ac01-ed0397e4f98f\") " pod="openshift-marketplace/redhat-marketplace-z8png"
Oct 03 14:50:49 crc kubenswrapper[4861]: I1003 14:50:49.999774 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9fc733fa-f039-485a-ac01-ed0397e4f98f-utilities\") pod \"redhat-marketplace-z8png\" (UID: \"9fc733fa-f039-485a-ac01-ed0397e4f98f\") " pod="openshift-marketplace/redhat-marketplace-z8png"
Oct 03 14:50:50 crc kubenswrapper[4861]: I1003 14:50:50.022054 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fvwj9\" (UniqueName: \"kubernetes.io/projected/9fc733fa-f039-485a-ac01-ed0397e4f98f-kube-api-access-fvwj9\") pod \"redhat-marketplace-z8png\" (UID: \"9fc733fa-f039-485a-ac01-ed0397e4f98f\") " pod="openshift-marketplace/redhat-marketplace-z8png"
Oct 03 14:50:50 crc kubenswrapper[4861]: I1003 14:50:50.077067 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-z8png"
Oct 03 14:50:50 crc kubenswrapper[4861]: W1003 14:50:50.556401 4861 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9fc733fa_f039_485a_ac01_ed0397e4f98f.slice/crio-b74fa9c5375239cedfdbd6f44c34c01e98be4ba67d2d3c7027c540bfca2db25d WatchSource:0}: Error finding container b74fa9c5375239cedfdbd6f44c34c01e98be4ba67d2d3c7027c540bfca2db25d: Status 404 returned error can't find the container with id b74fa9c5375239cedfdbd6f44c34c01e98be4ba67d2d3c7027c540bfca2db25d
Oct 03 14:50:50 crc kubenswrapper[4861]: I1003 14:50:50.558493 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-z8png"]
Oct 03 14:50:50 crc kubenswrapper[4861]: I1003 14:50:50.905549 4861 generic.go:334] "Generic (PLEG): container finished" podID="9fc733fa-f039-485a-ac01-ed0397e4f98f" containerID="94832d88c647b05b0c8f0ada079d35fdc96d52b42cb202e600d9758b5066fd4e" exitCode=0
Oct 03 14:50:50 crc kubenswrapper[4861]: I1003 14:50:50.905878 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-z8png" event={"ID":"9fc733fa-f039-485a-ac01-ed0397e4f98f","Type":"ContainerDied","Data":"94832d88c647b05b0c8f0ada079d35fdc96d52b42cb202e600d9758b5066fd4e"}
Oct 03 14:50:50 crc kubenswrapper[4861]: I1003 14:50:50.905945 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-z8png" event={"ID":"9fc733fa-f039-485a-ac01-ed0397e4f98f","Type":"ContainerStarted","Data":"b74fa9c5375239cedfdbd6f44c34c01e98be4ba67d2d3c7027c540bfca2db25d"}
Oct 03 14:50:51 crc kubenswrapper[4861]: I1003 14:50:51.915944 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-z8png" event={"ID":"9fc733fa-f039-485a-ac01-ed0397e4f98f","Type":"ContainerStarted","Data":"4fe20fa26329d15538e6de6c37a466030f5edf01555359fae7675e2782c79559"}
Oct 03 14:50:51 crc kubenswrapper[4861]: I1003 14:50:51.919733 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kddlg" event={"ID":"ad1218ba-ddc7-4368-bc3f-a15fc3a5b79b","Type":"ContainerStarted","Data":"732cd89cd323edadba9fad771deafb22264e1f0cd9d1f528122fe9b57a1503e0"}
Oct 03 14:50:53 crc kubenswrapper[4861]: I1003 14:50:53.937077 4861 generic.go:334] "Generic (PLEG): container finished" podID="9fc733fa-f039-485a-ac01-ed0397e4f98f" containerID="4fe20fa26329d15538e6de6c37a466030f5edf01555359fae7675e2782c79559" exitCode=0
Oct 03 14:50:53 crc kubenswrapper[4861]: I1003 14:50:53.937156 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-z8png" event={"ID":"9fc733fa-f039-485a-ac01-ed0397e4f98f","Type":"ContainerDied","Data":"4fe20fa26329d15538e6de6c37a466030f5edf01555359fae7675e2782c79559"}
Oct 03 14:50:54 crc kubenswrapper[4861]: I1003 14:50:54.956419 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-z8png" event={"ID":"9fc733fa-f039-485a-ac01-ed0397e4f98f","Type":"ContainerStarted","Data":"02857bd4010c659892e86100c99e901c24fb3d48fcd1429d72e2dc40ad660aef"}
Oct 03 14:50:54 crc kubenswrapper[4861]: I1003 14:50:54.986273 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-z8png" podStartSLOduration=2.270515792 podStartE2EDuration="5.986252412s" podCreationTimestamp="2025-10-03 14:50:49 +0000 UTC"
firstStartedPulling="2025-10-03 14:50:50.909104367 +0000 UTC m=+4764.907089414" lastFinishedPulling="2025-10-03 14:50:54.624840977 +0000 UTC m=+4768.622826034" observedRunningTime="2025-10-03 14:50:54.974681494 +0000 UTC m=+4768.972666551" watchObservedRunningTime="2025-10-03 14:50:54.986252412 +0000 UTC m=+4768.984237469" Oct 03 14:50:55 crc kubenswrapper[4861]: I1003 14:50:55.966168 4861 generic.go:334] "Generic (PLEG): container finished" podID="ad1218ba-ddc7-4368-bc3f-a15fc3a5b79b" containerID="732cd89cd323edadba9fad771deafb22264e1f0cd9d1f528122fe9b57a1503e0" exitCode=0 Oct 03 14:50:55 crc kubenswrapper[4861]: I1003 14:50:55.966555 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kddlg" event={"ID":"ad1218ba-ddc7-4368-bc3f-a15fc3a5b79b","Type":"ContainerDied","Data":"732cd89cd323edadba9fad771deafb22264e1f0cd9d1f528122fe9b57a1503e0"} Oct 03 14:50:56 crc kubenswrapper[4861]: I1003 14:50:56.978569 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kddlg" event={"ID":"ad1218ba-ddc7-4368-bc3f-a15fc3a5b79b","Type":"ContainerStarted","Data":"3a2dc94d660e03f813fc94d5ba47b5339dad75a36a5895639a9bb78e57dabd0d"} Oct 03 14:50:58 crc kubenswrapper[4861]: I1003 14:50:58.694831 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-kddlg" Oct 03 14:50:58 crc kubenswrapper[4861]: I1003 14:50:58.695154 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-kddlg" Oct 03 14:50:59 crc kubenswrapper[4861]: I1003 14:50:59.746799 4861 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-kddlg" podUID="ad1218ba-ddc7-4368-bc3f-a15fc3a5b79b" containerName="registry-server" probeResult="failure" output=< Oct 03 14:50:59 crc kubenswrapper[4861]: timeout: failed to connect service ":50051" within 1s Oct 03 14:50:59 crc kubenswrapper[4861]: > Oct 03 14:51:00 crc kubenswrapper[4861]: I1003 14:51:00.077740 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-z8png" Oct 03 14:51:00 crc kubenswrapper[4861]: I1003 14:51:00.077796 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-z8png" Oct 03 14:51:00 crc kubenswrapper[4861]: I1003 14:51:00.129944 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-z8png" Oct 03 14:51:00 crc kubenswrapper[4861]: I1003 14:51:00.144992 4861 patch_prober.go:28] interesting pod/machine-config-daemon-t9slw container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 14:51:00 crc kubenswrapper[4861]: I1003 14:51:00.145063 4861 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 14:51:00 crc kubenswrapper[4861]: I1003 14:51:00.145117 4861 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" Oct 03 14:51:00 crc 
kubenswrapper[4861]: I1003 14:51:00.145944 4861 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"c9fca1af93d570056d03200b887e5822727fbbd0c431d91df4b8a8541a504191"} pod="openshift-machine-config-operator/machine-config-daemon-t9slw" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 03 14:51:00 crc kubenswrapper[4861]: I1003 14:51:00.146013 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" containerName="machine-config-daemon" containerID="cri-o://c9fca1af93d570056d03200b887e5822727fbbd0c431d91df4b8a8541a504191" gracePeriod=600 Oct 03 14:51:00 crc kubenswrapper[4861]: I1003 14:51:00.151906 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-kddlg" podStartSLOduration=5.628937969 podStartE2EDuration="12.151883997s" podCreationTimestamp="2025-10-03 14:50:48 +0000 UTC" firstStartedPulling="2025-10-03 14:50:49.897028776 +0000 UTC m=+4763.895013823" lastFinishedPulling="2025-10-03 14:50:56.419974804 +0000 UTC m=+4770.417959851" observedRunningTime="2025-10-03 14:50:57.005175646 +0000 UTC m=+4771.003160713" watchObservedRunningTime="2025-10-03 14:51:00.151883997 +0000 UTC m=+4774.149869044" Oct 03 14:51:01 crc kubenswrapper[4861]: I1003 14:51:01.018504 4861 generic.go:334] "Generic (PLEG): container finished" podID="d8335d3f-417e-4114-b306-a3d8f6c31348" containerID="c9fca1af93d570056d03200b887e5822727fbbd0c431d91df4b8a8541a504191" exitCode=0 Oct 03 14:51:01 crc kubenswrapper[4861]: I1003 14:51:01.018588 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" event={"ID":"d8335d3f-417e-4114-b306-a3d8f6c31348","Type":"ContainerDied","Data":"c9fca1af93d570056d03200b887e5822727fbbd0c431d91df4b8a8541a504191"} Oct 03 14:51:01 crc kubenswrapper[4861]: I1003 14:51:01.019436 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" event={"ID":"d8335d3f-417e-4114-b306-a3d8f6c31348","Type":"ContainerStarted","Data":"b2d449adc90f7d0841e7f09d27f801ef238eaa66c11a79a42bb54ccb6df2a132"} Oct 03 14:51:01 crc kubenswrapper[4861]: I1003 14:51:01.019479 4861 scope.go:117] "RemoveContainer" containerID="9cc426a24e7af0ab4a5086c70c2d2103d9a0bf02f0906c97e1bde93e1a0d2c12" Oct 03 14:51:01 crc kubenswrapper[4861]: I1003 14:51:01.096967 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-z8png" Oct 03 14:51:01 crc kubenswrapper[4861]: I1003 14:51:01.346609 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-z8png"] Oct 03 14:51:03 crc kubenswrapper[4861]: I1003 14:51:03.037368 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-z8png" podUID="9fc733fa-f039-485a-ac01-ed0397e4f98f" containerName="registry-server" containerID="cri-o://02857bd4010c659892e86100c99e901c24fb3d48fcd1429d72e2dc40ad660aef" gracePeriod=2 Oct 03 14:51:03 crc kubenswrapper[4861]: I1003 14:51:03.515860 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-z8png" Oct 03 14:51:03 crc kubenswrapper[4861]: I1003 14:51:03.656946 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fvwj9\" (UniqueName: \"kubernetes.io/projected/9fc733fa-f039-485a-ac01-ed0397e4f98f-kube-api-access-fvwj9\") pod \"9fc733fa-f039-485a-ac01-ed0397e4f98f\" (UID: \"9fc733fa-f039-485a-ac01-ed0397e4f98f\") " Oct 03 14:51:03 crc kubenswrapper[4861]: I1003 14:51:03.657360 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9fc733fa-f039-485a-ac01-ed0397e4f98f-catalog-content\") pod \"9fc733fa-f039-485a-ac01-ed0397e4f98f\" (UID: \"9fc733fa-f039-485a-ac01-ed0397e4f98f\") " Oct 03 14:51:03 crc kubenswrapper[4861]: I1003 14:51:03.657616 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9fc733fa-f039-485a-ac01-ed0397e4f98f-utilities\") pod \"9fc733fa-f039-485a-ac01-ed0397e4f98f\" (UID: \"9fc733fa-f039-485a-ac01-ed0397e4f98f\") " Oct 03 14:51:03 crc kubenswrapper[4861]: I1003 14:51:03.658721 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9fc733fa-f039-485a-ac01-ed0397e4f98f-utilities" (OuterVolumeSpecName: "utilities") pod "9fc733fa-f039-485a-ac01-ed0397e4f98f" (UID: "9fc733fa-f039-485a-ac01-ed0397e4f98f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 14:51:03 crc kubenswrapper[4861]: I1003 14:51:03.671125 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9fc733fa-f039-485a-ac01-ed0397e4f98f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9fc733fa-f039-485a-ac01-ed0397e4f98f" (UID: "9fc733fa-f039-485a-ac01-ed0397e4f98f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 14:51:03 crc kubenswrapper[4861]: I1003 14:51:03.671480 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9fc733fa-f039-485a-ac01-ed0397e4f98f-kube-api-access-fvwj9" (OuterVolumeSpecName: "kube-api-access-fvwj9") pod "9fc733fa-f039-485a-ac01-ed0397e4f98f" (UID: "9fc733fa-f039-485a-ac01-ed0397e4f98f"). InnerVolumeSpecName "kube-api-access-fvwj9". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 14:51:03 crc kubenswrapper[4861]: I1003 14:51:03.760026 4861 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9fc733fa-f039-485a-ac01-ed0397e4f98f-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 14:51:03 crc kubenswrapper[4861]: I1003 14:51:03.760066 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fvwj9\" (UniqueName: \"kubernetes.io/projected/9fc733fa-f039-485a-ac01-ed0397e4f98f-kube-api-access-fvwj9\") on node \"crc\" DevicePath \"\"" Oct 03 14:51:03 crc kubenswrapper[4861]: I1003 14:51:03.760079 4861 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9fc733fa-f039-485a-ac01-ed0397e4f98f-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 14:51:04 crc kubenswrapper[4861]: I1003 14:51:04.046525 4861 generic.go:334] "Generic (PLEG): container finished" podID="9fc733fa-f039-485a-ac01-ed0397e4f98f" containerID="02857bd4010c659892e86100c99e901c24fb3d48fcd1429d72e2dc40ad660aef" exitCode=0 Oct 03 14:51:04 crc kubenswrapper[4861]: I1003 14:51:04.046562 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-z8png" event={"ID":"9fc733fa-f039-485a-ac01-ed0397e4f98f","Type":"ContainerDied","Data":"02857bd4010c659892e86100c99e901c24fb3d48fcd1429d72e2dc40ad660aef"} Oct 03 14:51:04 crc kubenswrapper[4861]: I1003 14:51:04.046586 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-z8png" event={"ID":"9fc733fa-f039-485a-ac01-ed0397e4f98f","Type":"ContainerDied","Data":"b74fa9c5375239cedfdbd6f44c34c01e98be4ba67d2d3c7027c540bfca2db25d"} Oct 03 14:51:04 crc kubenswrapper[4861]: I1003 14:51:04.046602 4861 scope.go:117] "RemoveContainer" containerID="02857bd4010c659892e86100c99e901c24fb3d48fcd1429d72e2dc40ad660aef" Oct 03 14:51:04 crc kubenswrapper[4861]: I1003 14:51:04.046714 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-z8png" Oct 03 14:51:04 crc kubenswrapper[4861]: I1003 14:51:04.078368 4861 scope.go:117] "RemoveContainer" containerID="4fe20fa26329d15538e6de6c37a466030f5edf01555359fae7675e2782c79559" Oct 03 14:51:04 crc kubenswrapper[4861]: I1003 14:51:04.079709 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-z8png"] Oct 03 14:51:04 crc kubenswrapper[4861]: I1003 14:51:04.091960 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-z8png"] Oct 03 14:51:04 crc kubenswrapper[4861]: I1003 14:51:04.104750 4861 scope.go:117] "RemoveContainer" containerID="94832d88c647b05b0c8f0ada079d35fdc96d52b42cb202e600d9758b5066fd4e" Oct 03 14:51:04 crc kubenswrapper[4861]: I1003 14:51:04.141914 4861 scope.go:117] "RemoveContainer" containerID="02857bd4010c659892e86100c99e901c24fb3d48fcd1429d72e2dc40ad660aef" Oct 03 14:51:04 crc kubenswrapper[4861]: E1003 14:51:04.142190 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"02857bd4010c659892e86100c99e901c24fb3d48fcd1429d72e2dc40ad660aef\": container with ID starting with 02857bd4010c659892e86100c99e901c24fb3d48fcd1429d72e2dc40ad660aef not found: ID does not exist" containerID="02857bd4010c659892e86100c99e901c24fb3d48fcd1429d72e2dc40ad660aef" Oct 03 14:51:04 crc kubenswrapper[4861]: I1003 14:51:04.142225 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"02857bd4010c659892e86100c99e901c24fb3d48fcd1429d72e2dc40ad660aef"} err="failed to get container status \"02857bd4010c659892e86100c99e901c24fb3d48fcd1429d72e2dc40ad660aef\": rpc error: code = NotFound desc = could not find container \"02857bd4010c659892e86100c99e901c24fb3d48fcd1429d72e2dc40ad660aef\": container with ID starting with 02857bd4010c659892e86100c99e901c24fb3d48fcd1429d72e2dc40ad660aef not found: ID does not exist" Oct 03 14:51:04 crc kubenswrapper[4861]: I1003 14:51:04.142310 4861 scope.go:117] "RemoveContainer" containerID="4fe20fa26329d15538e6de6c37a466030f5edf01555359fae7675e2782c79559" Oct 03 14:51:04 crc kubenswrapper[4861]: E1003 14:51:04.142600 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4fe20fa26329d15538e6de6c37a466030f5edf01555359fae7675e2782c79559\": container with ID starting with 4fe20fa26329d15538e6de6c37a466030f5edf01555359fae7675e2782c79559 not found: ID does not exist" containerID="4fe20fa26329d15538e6de6c37a466030f5edf01555359fae7675e2782c79559" Oct 03 14:51:04 crc kubenswrapper[4861]: I1003 14:51:04.142621 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4fe20fa26329d15538e6de6c37a466030f5edf01555359fae7675e2782c79559"} err="failed to get container status \"4fe20fa26329d15538e6de6c37a466030f5edf01555359fae7675e2782c79559\": rpc error: code = NotFound desc = could not find container \"4fe20fa26329d15538e6de6c37a466030f5edf01555359fae7675e2782c79559\": container with ID starting with 4fe20fa26329d15538e6de6c37a466030f5edf01555359fae7675e2782c79559 not found: ID does not exist" Oct 03 14:51:04 crc kubenswrapper[4861]: I1003 14:51:04.142636 4861 scope.go:117] "RemoveContainer" containerID="94832d88c647b05b0c8f0ada079d35fdc96d52b42cb202e600d9758b5066fd4e" Oct 03 14:51:04 crc kubenswrapper[4861]: E1003 14:51:04.143053 4861 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"94832d88c647b05b0c8f0ada079d35fdc96d52b42cb202e600d9758b5066fd4e\": container with ID starting with 94832d88c647b05b0c8f0ada079d35fdc96d52b42cb202e600d9758b5066fd4e not found: ID does not exist" containerID="94832d88c647b05b0c8f0ada079d35fdc96d52b42cb202e600d9758b5066fd4e" Oct 03 14:51:04 crc kubenswrapper[4861]: I1003 14:51:04.143070 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"94832d88c647b05b0c8f0ada079d35fdc96d52b42cb202e600d9758b5066fd4e"} err="failed to get container status \"94832d88c647b05b0c8f0ada079d35fdc96d52b42cb202e600d9758b5066fd4e\": rpc error: code = NotFound desc = could not find container \"94832d88c647b05b0c8f0ada079d35fdc96d52b42cb202e600d9758b5066fd4e\": container with ID starting with 94832d88c647b05b0c8f0ada079d35fdc96d52b42cb202e600d9758b5066fd4e not found: ID does not exist" Oct 03 14:51:04 crc kubenswrapper[4861]: I1003 14:51:04.692621 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9fc733fa-f039-485a-ac01-ed0397e4f98f" path="/var/lib/kubelet/pods/9fc733fa-f039-485a-ac01-ed0397e4f98f/volumes" Oct 03 14:51:09 crc kubenswrapper[4861]: I1003 14:51:09.760064 4861 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-kddlg" podUID="ad1218ba-ddc7-4368-bc3f-a15fc3a5b79b" containerName="registry-server" probeResult="failure" output=< Oct 03 14:51:09 crc kubenswrapper[4861]: timeout: failed to connect service ":50051" within 1s Oct 03 14:51:09 crc kubenswrapper[4861]: > Oct 03 14:51:19 crc kubenswrapper[4861]: I1003 14:51:19.757472 4861 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-kddlg" podUID="ad1218ba-ddc7-4368-bc3f-a15fc3a5b79b" containerName="registry-server" probeResult="failure" output=< Oct 03 14:51:19 crc kubenswrapper[4861]: timeout: failed to connect service ":50051" within 1s Oct 03 14:51:19 crc kubenswrapper[4861]: > Oct 03 14:51:28 crc kubenswrapper[4861]: I1003 14:51:28.754475 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-kddlg" Oct 03 14:51:28 crc kubenswrapper[4861]: I1003 14:51:28.818250 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-kddlg" Oct 03 14:51:29 crc kubenswrapper[4861]: I1003 14:51:29.009684 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-kddlg"] Oct 03 14:51:30 crc kubenswrapper[4861]: I1003 14:51:30.278095 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-kddlg" podUID="ad1218ba-ddc7-4368-bc3f-a15fc3a5b79b" containerName="registry-server" containerID="cri-o://3a2dc94d660e03f813fc94d5ba47b5339dad75a36a5895639a9bb78e57dabd0d" gracePeriod=2 Oct 03 14:51:30 crc kubenswrapper[4861]: I1003 14:51:30.956516 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-kddlg" Oct 03 14:51:31 crc kubenswrapper[4861]: I1003 14:51:31.101315 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cl25v\" (UniqueName: \"kubernetes.io/projected/ad1218ba-ddc7-4368-bc3f-a15fc3a5b79b-kube-api-access-cl25v\") pod \"ad1218ba-ddc7-4368-bc3f-a15fc3a5b79b\" (UID: \"ad1218ba-ddc7-4368-bc3f-a15fc3a5b79b\") " Oct 03 14:51:31 crc kubenswrapper[4861]: I1003 14:51:31.101356 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ad1218ba-ddc7-4368-bc3f-a15fc3a5b79b-catalog-content\") pod \"ad1218ba-ddc7-4368-bc3f-a15fc3a5b79b\" (UID: \"ad1218ba-ddc7-4368-bc3f-a15fc3a5b79b\") " Oct 03 14:51:31 crc kubenswrapper[4861]: I1003 14:51:31.101417 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ad1218ba-ddc7-4368-bc3f-a15fc3a5b79b-utilities\") pod \"ad1218ba-ddc7-4368-bc3f-a15fc3a5b79b\" (UID: \"ad1218ba-ddc7-4368-bc3f-a15fc3a5b79b\") " Oct 03 14:51:31 crc kubenswrapper[4861]: I1003 14:51:31.102533 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ad1218ba-ddc7-4368-bc3f-a15fc3a5b79b-utilities" (OuterVolumeSpecName: "utilities") pod "ad1218ba-ddc7-4368-bc3f-a15fc3a5b79b" (UID: "ad1218ba-ddc7-4368-bc3f-a15fc3a5b79b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 14:51:31 crc kubenswrapper[4861]: I1003 14:51:31.112027 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ad1218ba-ddc7-4368-bc3f-a15fc3a5b79b-kube-api-access-cl25v" (OuterVolumeSpecName: "kube-api-access-cl25v") pod "ad1218ba-ddc7-4368-bc3f-a15fc3a5b79b" (UID: "ad1218ba-ddc7-4368-bc3f-a15fc3a5b79b"). InnerVolumeSpecName "kube-api-access-cl25v". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 14:51:31 crc kubenswrapper[4861]: I1003 14:51:31.193935 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ad1218ba-ddc7-4368-bc3f-a15fc3a5b79b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ad1218ba-ddc7-4368-bc3f-a15fc3a5b79b" (UID: "ad1218ba-ddc7-4368-bc3f-a15fc3a5b79b"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 14:51:31 crc kubenswrapper[4861]: I1003 14:51:31.203957 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cl25v\" (UniqueName: \"kubernetes.io/projected/ad1218ba-ddc7-4368-bc3f-a15fc3a5b79b-kube-api-access-cl25v\") on node \"crc\" DevicePath \"\"" Oct 03 14:51:31 crc kubenswrapper[4861]: I1003 14:51:31.203982 4861 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ad1218ba-ddc7-4368-bc3f-a15fc3a5b79b-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 14:51:31 crc kubenswrapper[4861]: I1003 14:51:31.203992 4861 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ad1218ba-ddc7-4368-bc3f-a15fc3a5b79b-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 14:51:31 crc kubenswrapper[4861]: I1003 14:51:31.292070 4861 generic.go:334] "Generic (PLEG): container finished" podID="ad1218ba-ddc7-4368-bc3f-a15fc3a5b79b" containerID="3a2dc94d660e03f813fc94d5ba47b5339dad75a36a5895639a9bb78e57dabd0d" exitCode=0 Oct 03 14:51:31 crc kubenswrapper[4861]: I1003 14:51:31.292120 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kddlg" event={"ID":"ad1218ba-ddc7-4368-bc3f-a15fc3a5b79b","Type":"ContainerDied","Data":"3a2dc94d660e03f813fc94d5ba47b5339dad75a36a5895639a9bb78e57dabd0d"} Oct 03 14:51:31 crc kubenswrapper[4861]: I1003 14:51:31.292131 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-kddlg" Oct 03 14:51:31 crc kubenswrapper[4861]: I1003 14:51:31.292157 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kddlg" event={"ID":"ad1218ba-ddc7-4368-bc3f-a15fc3a5b79b","Type":"ContainerDied","Data":"47201b8fdfbfa4716ad188e7c848b3ab66bdcbfbc103f3d93fcecd06964855b8"} Oct 03 14:51:31 crc kubenswrapper[4861]: I1003 14:51:31.292179 4861 scope.go:117] "RemoveContainer" containerID="3a2dc94d660e03f813fc94d5ba47b5339dad75a36a5895639a9bb78e57dabd0d" Oct 03 14:51:31 crc kubenswrapper[4861]: I1003 14:51:31.320173 4861 scope.go:117] "RemoveContainer" containerID="732cd89cd323edadba9fad771deafb22264e1f0cd9d1f528122fe9b57a1503e0" Oct 03 14:51:31 crc kubenswrapper[4861]: I1003 14:51:31.327975 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-kddlg"] Oct 03 14:51:31 crc kubenswrapper[4861]: I1003 14:51:31.349621 4861 scope.go:117] "RemoveContainer" containerID="ad1d2f0ce55171e519e1362068b4c7ad2549af3551c70174b85f3ded4417638b" Oct 03 14:51:31 crc kubenswrapper[4861]: I1003 14:51:31.366962 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-kddlg"] Oct 03 14:51:31 crc kubenswrapper[4861]: I1003 14:51:31.411510 4861 scope.go:117] "RemoveContainer" containerID="3a2dc94d660e03f813fc94d5ba47b5339dad75a36a5895639a9bb78e57dabd0d" Oct 03 14:51:31 crc kubenswrapper[4861]: E1003 14:51:31.412217 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3a2dc94d660e03f813fc94d5ba47b5339dad75a36a5895639a9bb78e57dabd0d\": container with ID starting with 3a2dc94d660e03f813fc94d5ba47b5339dad75a36a5895639a9bb78e57dabd0d not found: ID does not exist" containerID="3a2dc94d660e03f813fc94d5ba47b5339dad75a36a5895639a9bb78e57dabd0d" Oct 03 14:51:31 crc kubenswrapper[4861]: I1003 14:51:31.412307 4861 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3a2dc94d660e03f813fc94d5ba47b5339dad75a36a5895639a9bb78e57dabd0d"} err="failed to get container status \"3a2dc94d660e03f813fc94d5ba47b5339dad75a36a5895639a9bb78e57dabd0d\": rpc error: code = NotFound desc = could not find container \"3a2dc94d660e03f813fc94d5ba47b5339dad75a36a5895639a9bb78e57dabd0d\": container with ID starting with 3a2dc94d660e03f813fc94d5ba47b5339dad75a36a5895639a9bb78e57dabd0d not found: ID does not exist" Oct 03 14:51:31 crc kubenswrapper[4861]: I1003 14:51:31.412337 4861 scope.go:117] "RemoveContainer" containerID="732cd89cd323edadba9fad771deafb22264e1f0cd9d1f528122fe9b57a1503e0" Oct 03 14:51:31 crc kubenswrapper[4861]: E1003 14:51:31.413358 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"732cd89cd323edadba9fad771deafb22264e1f0cd9d1f528122fe9b57a1503e0\": container with ID starting with 732cd89cd323edadba9fad771deafb22264e1f0cd9d1f528122fe9b57a1503e0 not found: ID does not exist" containerID="732cd89cd323edadba9fad771deafb22264e1f0cd9d1f528122fe9b57a1503e0" Oct 03 14:51:31 crc kubenswrapper[4861]: I1003 14:51:31.413390 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"732cd89cd323edadba9fad771deafb22264e1f0cd9d1f528122fe9b57a1503e0"} err="failed to get container status \"732cd89cd323edadba9fad771deafb22264e1f0cd9d1f528122fe9b57a1503e0\": rpc error: code = NotFound desc = could not find container \"732cd89cd323edadba9fad771deafb22264e1f0cd9d1f528122fe9b57a1503e0\": container with ID starting with 732cd89cd323edadba9fad771deafb22264e1f0cd9d1f528122fe9b57a1503e0 not found: ID does not exist" Oct 03 14:51:31 crc kubenswrapper[4861]: I1003 14:51:31.413409 4861 scope.go:117] "RemoveContainer" containerID="ad1d2f0ce55171e519e1362068b4c7ad2549af3551c70174b85f3ded4417638b" Oct 03 14:51:31 crc kubenswrapper[4861]: E1003 14:51:31.413631 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ad1d2f0ce55171e519e1362068b4c7ad2549af3551c70174b85f3ded4417638b\": container with ID starting with ad1d2f0ce55171e519e1362068b4c7ad2549af3551c70174b85f3ded4417638b not found: ID does not exist" containerID="ad1d2f0ce55171e519e1362068b4c7ad2549af3551c70174b85f3ded4417638b" Oct 03 14:51:31 crc kubenswrapper[4861]: I1003 14:51:31.413656 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ad1d2f0ce55171e519e1362068b4c7ad2549af3551c70174b85f3ded4417638b"} err="failed to get container status \"ad1d2f0ce55171e519e1362068b4c7ad2549af3551c70174b85f3ded4417638b\": rpc error: code = NotFound desc = could not find container \"ad1d2f0ce55171e519e1362068b4c7ad2549af3551c70174b85f3ded4417638b\": container with ID starting with ad1d2f0ce55171e519e1362068b4c7ad2549af3551c70174b85f3ded4417638b not found: ID does not exist" Oct 03 14:51:32 crc kubenswrapper[4861]: I1003 14:51:32.305050 4861 generic.go:334] "Generic (PLEG): container finished" podID="a0580466-6d38-4ad0-a84e-dcf312f06369" containerID="3afac97c25d4f292e6b4e9f0b740bdb1ac01e8671d3b175e72d784c36616e72f" exitCode=0 Oct 03 14:51:32 crc kubenswrapper[4861]: I1003 14:51:32.305157 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" 
event={"ID":"a0580466-6d38-4ad0-a84e-dcf312f06369","Type":"ContainerDied","Data":"3afac97c25d4f292e6b4e9f0b740bdb1ac01e8671d3b175e72d784c36616e72f"} Oct 03 14:51:32 crc kubenswrapper[4861]: I1003 14:51:32.690952 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ad1218ba-ddc7-4368-bc3f-a15fc3a5b79b" path="/var/lib/kubelet/pods/ad1218ba-ddc7-4368-bc3f-a15fc3a5b79b/volumes" Oct 03 14:51:33 crc kubenswrapper[4861]: I1003 14:51:33.698503 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Oct 03 14:51:33 crc kubenswrapper[4861]: I1003 14:51:33.852713 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/a0580466-6d38-4ad0-a84e-dcf312f06369-test-operator-ephemeral-temporary\") pod \"a0580466-6d38-4ad0-a84e-dcf312f06369\" (UID: \"a0580466-6d38-4ad0-a84e-dcf312f06369\") " Oct 03 14:51:33 crc kubenswrapper[4861]: I1003 14:51:33.852750 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/a0580466-6d38-4ad0-a84e-dcf312f06369-test-operator-ephemeral-workdir\") pod \"a0580466-6d38-4ad0-a84e-dcf312f06369\" (UID: \"a0580466-6d38-4ad0-a84e-dcf312f06369\") " Oct 03 14:51:33 crc kubenswrapper[4861]: I1003 14:51:33.852789 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/a0580466-6d38-4ad0-a84e-dcf312f06369-ca-certs\") pod \"a0580466-6d38-4ad0-a84e-dcf312f06369\" (UID: \"a0580466-6d38-4ad0-a84e-dcf312f06369\") " Oct 03 14:51:33 crc kubenswrapper[4861]: I1003 14:51:33.852824 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a0580466-6d38-4ad0-a84e-dcf312f06369-ssh-key\") pod \"a0580466-6d38-4ad0-a84e-dcf312f06369\" (UID: \"a0580466-6d38-4ad0-a84e-dcf312f06369\") " Oct 03 14:51:33 crc kubenswrapper[4861]: I1003 14:51:33.852911 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/a0580466-6d38-4ad0-a84e-dcf312f06369-openstack-config-secret\") pod \"a0580466-6d38-4ad0-a84e-dcf312f06369\" (UID: \"a0580466-6d38-4ad0-a84e-dcf312f06369\") " Oct 03 14:51:33 crc kubenswrapper[4861]: I1003 14:51:33.852928 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/a0580466-6d38-4ad0-a84e-dcf312f06369-openstack-config\") pod \"a0580466-6d38-4ad0-a84e-dcf312f06369\" (UID: \"a0580466-6d38-4ad0-a84e-dcf312f06369\") " Oct 03 14:51:33 crc kubenswrapper[4861]: I1003 14:51:33.852976 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-logs\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"a0580466-6d38-4ad0-a84e-dcf312f06369\" (UID: \"a0580466-6d38-4ad0-a84e-dcf312f06369\") " Oct 03 14:51:33 crc kubenswrapper[4861]: I1003 14:51:33.853037 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cnqjn\" (UniqueName: \"kubernetes.io/projected/a0580466-6d38-4ad0-a84e-dcf312f06369-kube-api-access-cnqjn\") pod \"a0580466-6d38-4ad0-a84e-dcf312f06369\" (UID: \"a0580466-6d38-4ad0-a84e-dcf312f06369\") " Oct 03 14:51:33 crc kubenswrapper[4861]: I1003 14:51:33.853096 4861 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a0580466-6d38-4ad0-a84e-dcf312f06369-config-data\") pod \"a0580466-6d38-4ad0-a84e-dcf312f06369\" (UID: \"a0580466-6d38-4ad0-a84e-dcf312f06369\") " Oct 03 14:51:33 crc kubenswrapper[4861]: I1003 14:51:33.853528 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a0580466-6d38-4ad0-a84e-dcf312f06369-test-operator-ephemeral-temporary" (OuterVolumeSpecName: "test-operator-ephemeral-temporary") pod "a0580466-6d38-4ad0-a84e-dcf312f06369" (UID: "a0580466-6d38-4ad0-a84e-dcf312f06369"). InnerVolumeSpecName "test-operator-ephemeral-temporary". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 14:51:33 crc kubenswrapper[4861]: I1003 14:51:33.853956 4861 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/a0580466-6d38-4ad0-a84e-dcf312f06369-test-operator-ephemeral-temporary\") on node \"crc\" DevicePath \"\"" Oct 03 14:51:33 crc kubenswrapper[4861]: I1003 14:51:33.854730 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a0580466-6d38-4ad0-a84e-dcf312f06369-config-data" (OuterVolumeSpecName: "config-data") pod "a0580466-6d38-4ad0-a84e-dcf312f06369" (UID: "a0580466-6d38-4ad0-a84e-dcf312f06369"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 14:51:33 crc kubenswrapper[4861]: I1003 14:51:33.859919 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage11-crc" (OuterVolumeSpecName: "test-operator-logs") pod "a0580466-6d38-4ad0-a84e-dcf312f06369" (UID: "a0580466-6d38-4ad0-a84e-dcf312f06369"). InnerVolumeSpecName "local-storage11-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 03 14:51:33 crc kubenswrapper[4861]: I1003 14:51:33.866151 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0580466-6d38-4ad0-a84e-dcf312f06369-kube-api-access-cnqjn" (OuterVolumeSpecName: "kube-api-access-cnqjn") pod "a0580466-6d38-4ad0-a84e-dcf312f06369" (UID: "a0580466-6d38-4ad0-a84e-dcf312f06369"). InnerVolumeSpecName "kube-api-access-cnqjn". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 14:51:33 crc kubenswrapper[4861]: I1003 14:51:33.882564 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0580466-6d38-4ad0-a84e-dcf312f06369-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "a0580466-6d38-4ad0-a84e-dcf312f06369" (UID: "a0580466-6d38-4ad0-a84e-dcf312f06369"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 14:51:33 crc kubenswrapper[4861]: I1003 14:51:33.887561 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a0580466-6d38-4ad0-a84e-dcf312f06369-test-operator-ephemeral-workdir" (OuterVolumeSpecName: "test-operator-ephemeral-workdir") pod "a0580466-6d38-4ad0-a84e-dcf312f06369" (UID: "a0580466-6d38-4ad0-a84e-dcf312f06369"). InnerVolumeSpecName "test-operator-ephemeral-workdir". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 14:51:33 crc kubenswrapper[4861]: I1003 14:51:33.891052 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0580466-6d38-4ad0-a84e-dcf312f06369-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "a0580466-6d38-4ad0-a84e-dcf312f06369" (UID: "a0580466-6d38-4ad0-a84e-dcf312f06369"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 14:51:33 crc kubenswrapper[4861]: I1003 14:51:33.895703 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0580466-6d38-4ad0-a84e-dcf312f06369-ca-certs" (OuterVolumeSpecName: "ca-certs") pod "a0580466-6d38-4ad0-a84e-dcf312f06369" (UID: "a0580466-6d38-4ad0-a84e-dcf312f06369"). InnerVolumeSpecName "ca-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 14:51:33 crc kubenswrapper[4861]: I1003 14:51:33.914274 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a0580466-6d38-4ad0-a84e-dcf312f06369-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "a0580466-6d38-4ad0-a84e-dcf312f06369" (UID: "a0580466-6d38-4ad0-a84e-dcf312f06369"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 14:51:33 crc kubenswrapper[4861]: I1003 14:51:33.955974 4861 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a0580466-6d38-4ad0-a84e-dcf312f06369-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 14:51:33 crc kubenswrapper[4861]: I1003 14:51:33.956179 4861 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/a0580466-6d38-4ad0-a84e-dcf312f06369-test-operator-ephemeral-workdir\") on node \"crc\" DevicePath \"\"" Oct 03 14:51:33 crc kubenswrapper[4861]: I1003 14:51:33.956267 4861 reconciler_common.go:293] "Volume detached for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/a0580466-6d38-4ad0-a84e-dcf312f06369-ca-certs\") on node \"crc\" DevicePath \"\"" Oct 03 14:51:33 crc kubenswrapper[4861]: I1003 14:51:33.956330 4861 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a0580466-6d38-4ad0-a84e-dcf312f06369-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 03 14:51:33 crc kubenswrapper[4861]: I1003 14:51:33.956427 4861 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/a0580466-6d38-4ad0-a84e-dcf312f06369-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Oct 03 14:51:33 crc kubenswrapper[4861]: I1003 14:51:33.956510 4861 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/a0580466-6d38-4ad0-a84e-dcf312f06369-openstack-config\") on node \"crc\" DevicePath \"\"" Oct 03 14:51:33 crc kubenswrapper[4861]: I1003 14:51:33.957264 4861 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" " Oct 03 14:51:33 crc kubenswrapper[4861]: I1003 14:51:33.957372 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cnqjn\" (UniqueName: \"kubernetes.io/projected/a0580466-6d38-4ad0-a84e-dcf312f06369-kube-api-access-cnqjn\") on node \"crc\" DevicePath \"\"" Oct 03 14:51:33 crc kubenswrapper[4861]: I1003 
14:51:33.986145 4861 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage11-crc" (UniqueName: "kubernetes.io/local-volume/local-storage11-crc") on node "crc" Oct 03 14:51:34 crc kubenswrapper[4861]: I1003 14:51:34.058834 4861 reconciler_common.go:293] "Volume detached for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" DevicePath \"\"" Oct 03 14:51:34 crc kubenswrapper[4861]: I1003 14:51:34.322886 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"a0580466-6d38-4ad0-a84e-dcf312f06369","Type":"ContainerDied","Data":"3467008d8ee388ac6c2e863c527e12c297105e11a2602600f2bf2f43a4703954"} Oct 03 14:51:34 crc kubenswrapper[4861]: I1003 14:51:34.322920 4861 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3467008d8ee388ac6c2e863c527e12c297105e11a2602600f2bf2f43a4703954" Oct 03 14:51:34 crc kubenswrapper[4861]: I1003 14:51:34.323171 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Oct 03 14:51:45 crc kubenswrapper[4861]: I1003 14:51:45.149553 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Oct 03 14:51:45 crc kubenswrapper[4861]: E1003 14:51:45.150555 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9fc733fa-f039-485a-ac01-ed0397e4f98f" containerName="extract-utilities" Oct 03 14:51:45 crc kubenswrapper[4861]: I1003 14:51:45.150574 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="9fc733fa-f039-485a-ac01-ed0397e4f98f" containerName="extract-utilities" Oct 03 14:51:45 crc kubenswrapper[4861]: E1003 14:51:45.150589 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a0580466-6d38-4ad0-a84e-dcf312f06369" containerName="tempest-tests-tempest-tests-runner" Oct 03 14:51:45 crc kubenswrapper[4861]: I1003 14:51:45.150599 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="a0580466-6d38-4ad0-a84e-dcf312f06369" containerName="tempest-tests-tempest-tests-runner" Oct 03 14:51:45 crc kubenswrapper[4861]: E1003 14:51:45.150637 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad1218ba-ddc7-4368-bc3f-a15fc3a5b79b" containerName="extract-utilities" Oct 03 14:51:45 crc kubenswrapper[4861]: I1003 14:51:45.150649 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad1218ba-ddc7-4368-bc3f-a15fc3a5b79b" containerName="extract-utilities" Oct 03 14:51:45 crc kubenswrapper[4861]: E1003 14:51:45.150672 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9fc733fa-f039-485a-ac01-ed0397e4f98f" containerName="registry-server" Oct 03 14:51:45 crc kubenswrapper[4861]: I1003 14:51:45.150683 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="9fc733fa-f039-485a-ac01-ed0397e4f98f" containerName="registry-server" Oct 03 14:51:45 crc kubenswrapper[4861]: E1003 14:51:45.150694 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad1218ba-ddc7-4368-bc3f-a15fc3a5b79b" containerName="registry-server" Oct 03 14:51:45 crc kubenswrapper[4861]: I1003 14:51:45.150700 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad1218ba-ddc7-4368-bc3f-a15fc3a5b79b" containerName="registry-server" Oct 03 14:51:45 crc kubenswrapper[4861]: E1003 14:51:45.150749 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad1218ba-ddc7-4368-bc3f-a15fc3a5b79b" containerName="extract-content" 
Oct 03 14:51:45 crc kubenswrapper[4861]: I1003 14:51:45.150757 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad1218ba-ddc7-4368-bc3f-a15fc3a5b79b" containerName="extract-content" Oct 03 14:51:45 crc kubenswrapper[4861]: E1003 14:51:45.150776 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9fc733fa-f039-485a-ac01-ed0397e4f98f" containerName="extract-content" Oct 03 14:51:45 crc kubenswrapper[4861]: I1003 14:51:45.150782 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="9fc733fa-f039-485a-ac01-ed0397e4f98f" containerName="extract-content" Oct 03 14:51:45 crc kubenswrapper[4861]: I1003 14:51:45.151067 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="9fc733fa-f039-485a-ac01-ed0397e4f98f" containerName="registry-server" Oct 03 14:51:45 crc kubenswrapper[4861]: I1003 14:51:45.151094 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="a0580466-6d38-4ad0-a84e-dcf312f06369" containerName="tempest-tests-tempest-tests-runner" Oct 03 14:51:45 crc kubenswrapper[4861]: I1003 14:51:45.151109 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="ad1218ba-ddc7-4368-bc3f-a15fc3a5b79b" containerName="registry-server" Oct 03 14:51:45 crc kubenswrapper[4861]: I1003 14:51:45.152038 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 03 14:51:45 crc kubenswrapper[4861]: I1003 14:51:45.154544 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-829rv" Oct 03 14:51:45 crc kubenswrapper[4861]: I1003 14:51:45.164451 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Oct 03 14:51:45 crc kubenswrapper[4861]: I1003 14:51:45.282927 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"9b4f3282-6471-451a-95d3-5a1803ae69ae\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 03 14:51:45 crc kubenswrapper[4861]: I1003 14:51:45.283261 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k4fpp\" (UniqueName: \"kubernetes.io/projected/9b4f3282-6471-451a-95d3-5a1803ae69ae-kube-api-access-k4fpp\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"9b4f3282-6471-451a-95d3-5a1803ae69ae\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 03 14:51:45 crc kubenswrapper[4861]: I1003 14:51:45.385698 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"9b4f3282-6471-451a-95d3-5a1803ae69ae\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 03 14:51:45 crc kubenswrapper[4861]: I1003 14:51:45.385756 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k4fpp\" (UniqueName: \"kubernetes.io/projected/9b4f3282-6471-451a-95d3-5a1803ae69ae-kube-api-access-k4fpp\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"9b4f3282-6471-451a-95d3-5a1803ae69ae\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 03 
14:51:45 crc kubenswrapper[4861]: I1003 14:51:45.387711 4861 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"9b4f3282-6471-451a-95d3-5a1803ae69ae\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"
Oct 03 14:51:45 crc kubenswrapper[4861]: I1003 14:51:45.429188 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k4fpp\" (UniqueName: \"kubernetes.io/projected/9b4f3282-6471-451a-95d3-5a1803ae69ae-kube-api-access-k4fpp\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"9b4f3282-6471-451a-95d3-5a1803ae69ae\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"
Oct 03 14:51:45 crc kubenswrapper[4861]: I1003 14:51:45.491639 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"9b4f3282-6471-451a-95d3-5a1803ae69ae\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"
Oct 03 14:51:45 crc kubenswrapper[4861]: I1003 14:51:45.770122 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"
Oct 03 14:51:46 crc kubenswrapper[4861]: I1003 14:51:46.307889 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"]
Oct 03 14:51:46 crc kubenswrapper[4861]: W1003 14:51:46.310056 4861 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9b4f3282_6471_451a_95d3_5a1803ae69ae.slice/crio-8cd3fcbeea7c42450295ca52d2a82bf0da152ac983eebb6f8971279300f6cd9c WatchSource:0}: Error finding container 8cd3fcbeea7c42450295ca52d2a82bf0da152ac983eebb6f8971279300f6cd9c: Status 404 returned error can't find the container with id 8cd3fcbeea7c42450295ca52d2a82bf0da152ac983eebb6f8971279300f6cd9c
Oct 03 14:51:46 crc kubenswrapper[4861]: I1003 14:51:46.314368 4861 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Oct 03 14:51:46 crc kubenswrapper[4861]: I1003 14:51:46.467575 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"9b4f3282-6471-451a-95d3-5a1803ae69ae","Type":"ContainerStarted","Data":"8cd3fcbeea7c42450295ca52d2a82bf0da152ac983eebb6f8971279300f6cd9c"}
Oct 03 14:51:47 crc kubenswrapper[4861]: I1003 14:51:47.480844 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"9b4f3282-6471-451a-95d3-5a1803ae69ae","Type":"ContainerStarted","Data":"f35322f2eda0376a4a77df70861a3696afcf8ffcca849f7273afb638a8eb98f1"}
Oct 03 14:51:47 crc kubenswrapper[4861]: I1003 14:51:47.504927 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" podStartSLOduration=1.5575922979999999 podStartE2EDuration="2.504883923s" podCreationTimestamp="2025-10-03 14:51:45 +0000 UTC" firstStartedPulling="2025-10-03 14:51:46.314125847 +0000 UTC m=+4820.312110904" lastFinishedPulling="2025-10-03 14:51:47.261417492 +0000 UTC m=+4821.259402529" observedRunningTime="2025-10-03 14:51:47.493339645 +0000 UTC m=+4821.491324702" watchObservedRunningTime="2025-10-03 14:51:47.504883923 +0000 UTC m=+4821.502869000"
Oct 03 14:52:05 crc kubenswrapper[4861]: I1003 14:52:05.442576 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-5glhl/must-gather-wlfgj"]
Oct 03 14:52:05 crc kubenswrapper[4861]: I1003 14:52:05.445190 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-5glhl/must-gather-wlfgj"
Oct 03 14:52:05 crc kubenswrapper[4861]: I1003 14:52:05.447977 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-5glhl"/"default-dockercfg-7grn6"
Oct 03 14:52:05 crc kubenswrapper[4861]: I1003 14:52:05.448006 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-5glhl"/"openshift-service-ca.crt"
Oct 03 14:52:05 crc kubenswrapper[4861]: I1003 14:52:05.448308 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-5glhl"/"kube-root-ca.crt"
Oct 03 14:52:05 crc kubenswrapper[4861]: I1003 14:52:05.454881 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-5glhl/must-gather-wlfgj"]
Oct 03 14:52:05 crc kubenswrapper[4861]: I1003 14:52:05.567016 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jd48n\" (UniqueName: \"kubernetes.io/projected/34266e5a-6aea-4c6c-8b0d-31c1eedfa1ac-kube-api-access-jd48n\") pod \"must-gather-wlfgj\" (UID: \"34266e5a-6aea-4c6c-8b0d-31c1eedfa1ac\") " pod="openshift-must-gather-5glhl/must-gather-wlfgj"
Oct 03 14:52:05 crc kubenswrapper[4861]: I1003 14:52:05.567170 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/34266e5a-6aea-4c6c-8b0d-31c1eedfa1ac-must-gather-output\") pod \"must-gather-wlfgj\" (UID: \"34266e5a-6aea-4c6c-8b0d-31c1eedfa1ac\") " pod="openshift-must-gather-5glhl/must-gather-wlfgj"
Oct 03 14:52:05 crc kubenswrapper[4861]: I1003 14:52:05.668807 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jd48n\" (UniqueName: \"kubernetes.io/projected/34266e5a-6aea-4c6c-8b0d-31c1eedfa1ac-kube-api-access-jd48n\") pod \"must-gather-wlfgj\" (UID: \"34266e5a-6aea-4c6c-8b0d-31c1eedfa1ac\") " pod="openshift-must-gather-5glhl/must-gather-wlfgj"
Oct 03 14:52:05 crc kubenswrapper[4861]: I1003 14:52:05.668906 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/34266e5a-6aea-4c6c-8b0d-31c1eedfa1ac-must-gather-output\") pod \"must-gather-wlfgj\" (UID: \"34266e5a-6aea-4c6c-8b0d-31c1eedfa1ac\") " pod="openshift-must-gather-5glhl/must-gather-wlfgj"
Oct 03 14:52:05 crc kubenswrapper[4861]: I1003 14:52:05.669549 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/34266e5a-6aea-4c6c-8b0d-31c1eedfa1ac-must-gather-output\") pod \"must-gather-wlfgj\" (UID: \"34266e5a-6aea-4c6c-8b0d-31c1eedfa1ac\") " pod="openshift-must-gather-5glhl/must-gather-wlfgj"
Oct 03 14:52:05 crc kubenswrapper[4861]: I1003 14:52:05.686131 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jd48n\" (UniqueName: \"kubernetes.io/projected/34266e5a-6aea-4c6c-8b0d-31c1eedfa1ac-kube-api-access-jd48n\") pod \"must-gather-wlfgj\" (UID: \"34266e5a-6aea-4c6c-8b0d-31c1eedfa1ac\") " pod="openshift-must-gather-5glhl/must-gather-wlfgj"
Oct 03 14:52:05 crc kubenswrapper[4861]: I1003 14:52:05.765371 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-5glhl/must-gather-wlfgj"
Oct 03 14:52:06 crc kubenswrapper[4861]: I1003 14:52:06.210768 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-5glhl/must-gather-wlfgj"]
Oct 03 14:52:06 crc kubenswrapper[4861]: I1003 14:52:06.637441 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-5glhl/must-gather-wlfgj" event={"ID":"34266e5a-6aea-4c6c-8b0d-31c1eedfa1ac","Type":"ContainerStarted","Data":"c809b92c56de5a9caa80470bd1c3f5439558553fa5102a95ab33737e8a59b81f"}
Oct 03 14:52:11 crc kubenswrapper[4861]: I1003 14:52:11.700749 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-5glhl/must-gather-wlfgj" event={"ID":"34266e5a-6aea-4c6c-8b0d-31c1eedfa1ac","Type":"ContainerStarted","Data":"e1d3f468dd297b00738b232908ce413f495ea3f23aaa84badb27c7810578650b"}
Oct 03 14:52:12 crc kubenswrapper[4861]: I1003 14:52:12.723545 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-5glhl/must-gather-wlfgj" event={"ID":"34266e5a-6aea-4c6c-8b0d-31c1eedfa1ac","Type":"ContainerStarted","Data":"0ac1273bed044b44515dce1247ca39b3c87939931099f4b912aa066ea044192b"}
Oct 03 14:52:12 crc kubenswrapper[4861]: I1003 14:52:12.743761 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-5glhl/must-gather-wlfgj" podStartSLOduration=3.37410025 podStartE2EDuration="7.743730922s" podCreationTimestamp="2025-10-03 14:52:05 +0000 UTC" firstStartedPulling="2025-10-03 14:52:06.221607075 +0000 UTC m=+4840.219592122" lastFinishedPulling="2025-10-03 14:52:10.591237747 +0000 UTC m=+4844.589222794" observedRunningTime="2025-10-03 14:52:12.738689677 +0000 UTC m=+4846.736674744" watchObservedRunningTime="2025-10-03 14:52:12.743730922 +0000 UTC m=+4846.741716019"
Oct 03 14:52:16 crc kubenswrapper[4861]: I1003 14:52:16.511859 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-5glhl/crc-debug-jrrnv"]
Oct 03 14:52:16 crc kubenswrapper[4861]: I1003 14:52:16.513649 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-5glhl/crc-debug-jrrnv"
Oct 03 14:52:16 crc kubenswrapper[4861]: I1003 14:52:16.696526 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ss2kj\" (UniqueName: \"kubernetes.io/projected/0f8ffc26-6a81-439d-b527-587280137d65-kube-api-access-ss2kj\") pod \"crc-debug-jrrnv\" (UID: \"0f8ffc26-6a81-439d-b527-587280137d65\") " pod="openshift-must-gather-5glhl/crc-debug-jrrnv"
Oct 03 14:52:16 crc kubenswrapper[4861]: I1003 14:52:16.696597 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0f8ffc26-6a81-439d-b527-587280137d65-host\") pod \"crc-debug-jrrnv\" (UID: \"0f8ffc26-6a81-439d-b527-587280137d65\") " pod="openshift-must-gather-5glhl/crc-debug-jrrnv"
Oct 03 14:52:16 crc kubenswrapper[4861]: I1003 14:52:16.798718 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0f8ffc26-6a81-439d-b527-587280137d65-host\") pod \"crc-debug-jrrnv\" (UID: \"0f8ffc26-6a81-439d-b527-587280137d65\") " pod="openshift-must-gather-5glhl/crc-debug-jrrnv"
Oct 03 14:52:16 crc kubenswrapper[4861]: I1003 14:52:16.798848 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0f8ffc26-6a81-439d-b527-587280137d65-host\") pod \"crc-debug-jrrnv\" (UID: \"0f8ffc26-6a81-439d-b527-587280137d65\") " pod="openshift-must-gather-5glhl/crc-debug-jrrnv"
Oct 03 14:52:16 crc kubenswrapper[4861]: I1003 14:52:16.799935 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ss2kj\" (UniqueName: \"kubernetes.io/projected/0f8ffc26-6a81-439d-b527-587280137d65-kube-api-access-ss2kj\") pod \"crc-debug-jrrnv\" (UID: \"0f8ffc26-6a81-439d-b527-587280137d65\") " pod="openshift-must-gather-5glhl/crc-debug-jrrnv"
Oct 03 14:52:16 crc kubenswrapper[4861]: I1003 14:52:16.835980 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ss2kj\" (UniqueName: \"kubernetes.io/projected/0f8ffc26-6a81-439d-b527-587280137d65-kube-api-access-ss2kj\") pod \"crc-debug-jrrnv\" (UID: \"0f8ffc26-6a81-439d-b527-587280137d65\") " pod="openshift-must-gather-5glhl/crc-debug-jrrnv"
Oct 03 14:52:16 crc kubenswrapper[4861]: I1003 14:52:16.839707 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-5glhl/crc-debug-jrrnv"
Oct 03 14:52:17 crc kubenswrapper[4861]: I1003 14:52:17.789338 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-5glhl/crc-debug-jrrnv" event={"ID":"0f8ffc26-6a81-439d-b527-587280137d65","Type":"ContainerStarted","Data":"b20fb79a0f988fcbcf80836c14bf562fd3ccf7951b0fbb818faf3b6656ff0e34"}
Oct 03 14:52:26 crc kubenswrapper[4861]: I1003 14:52:26.552775 4861 patch_prober.go:28] interesting pod/console-5557b87f9c-zzzj9 container/console namespace/openshift-console: Readiness probe status=failure output="Get \"https://10.217.0.46:8443/health\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body=
Oct 03 14:52:26 crc kubenswrapper[4861]: I1003 14:52:26.553346 4861 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/console-5557b87f9c-zzzj9" podUID="2a5db69d-e844-4484-978a-bc7461fcd2f3" containerName="console" probeResult="failure" output="Get \"https://10.217.0.46:8443/health\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)"
Oct 03 14:52:41 crc kubenswrapper[4861]: E1003 14:52:41.090504 4861 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:6ab858aed98e4fe57e6b144da8e90ad5d6698bb4cc5521206f5c05809f0f9296"
Oct 03 14:52:41 crc kubenswrapper[4861]: E1003 14:52:41.094613 4861 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:container-00,Image:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:6ab858aed98e4fe57e6b144da8e90ad5d6698bb4cc5521206f5c05809f0f9296,Command:[chroot /host bash -c echo 'TOOLBOX_NAME=toolbox-osp' > /root/.toolboxrc ; rm -rf \"/var/tmp/sos-osp\" && mkdir -p \"/var/tmp/sos-osp\" && sudo podman rm --force toolbox-osp; sudo --preserve-env podman pull --authfile /var/lib/kubelet/config.json registry.redhat.io/rhel9/support-tools && toolbox sos report --batch --all-logs --only-plugins block,cifs,crio,devicemapper,devices,firewall_tables,firewalld,iscsi,lvm2,memory,multipath,nfs,nis,nvme,podman,process,processor,selinux,scsi,udev,logs,crypto --tmp-dir=\"/var/tmp/sos-osp\" && if [[ \"$(ls /var/log/pods/*/{*.log.*,*/*.log.*} 2>/dev/null)\" != '' ]]; then tar --ignore-failed-read --warning=no-file-changed -cJf \"/var/tmp/sos-osp/podlogs.tar.xz\" --transform 's,^,podlogs/,' /var/log/pods/*/{*.log.*,*/*.log.*} || true; fi],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:TMOUT,Value:900,ValueFrom:nil,},EnvVar{Name:HOST,Value:/host,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:host,ReadOnly:false,MountPath:/host,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-ss2kj,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:*true,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod crc-debug-jrrnv_openshift-must-gather-5glhl(0f8ffc26-6a81-439d-b527-587280137d65): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Oct 03 14:52:41 crc kubenswrapper[4861]: E1003 14:52:41.095886 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"container-00\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openshift-must-gather-5glhl/crc-debug-jrrnv" podUID="0f8ffc26-6a81-439d-b527-587280137d65"
Oct 03 14:52:42 crc kubenswrapper[4861]: E1003 14:52:42.018114 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"container-00\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:6ab858aed98e4fe57e6b144da8e90ad5d6698bb4cc5521206f5c05809f0f9296\\\"\"" pod="openshift-must-gather-5glhl/crc-debug-jrrnv" podUID="0f8ffc26-6a81-439d-b527-587280137d65"
Oct 03 14:52:55 crc kubenswrapper[4861]: I1003 14:52:55.125680 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-5glhl/crc-debug-jrrnv" event={"ID":"0f8ffc26-6a81-439d-b527-587280137d65","Type":"ContainerStarted","Data":"1aaf47a529585f987156745c94e49df181436cc02df40537792c398f16030b3f"}
Oct 03 14:53:00 crc kubenswrapper[4861]: I1003 14:53:00.147014 4861 patch_prober.go:28] interesting pod/machine-config-daemon-t9slw container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 03 14:53:00 crc kubenswrapper[4861]: I1003 14:53:00.148783 4861 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 03 14:53:30 crc kubenswrapper[4861]: I1003 14:53:30.144640 4861 patch_prober.go:28] interesting pod/machine-config-daemon-t9slw container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 03 14:53:30 crc kubenswrapper[4861]: I1003 14:53:30.145190 4861 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 03 14:53:49 crc kubenswrapper[4861]: I1003 14:53:49.486840 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-64fc59b774-2zcpl_cf1005d0-0ea7-4d8a-bec8-445949aa9162/barbican-api/0.log"
Oct 03 14:53:49 crc kubenswrapper[4861]: I1003 14:53:49.551211 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-64fc59b774-2zcpl_cf1005d0-0ea7-4d8a-bec8-445949aa9162/barbican-api-log/0.log"
Oct 03 14:53:49 crc kubenswrapper[4861]: I1003 14:53:49.740985 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-7b56bc9586-w989t_fc6983c6-4e21-49b6-a48a-f062bb5afd49/barbican-keystone-listener/0.log"
Oct 03 14:53:49 crc kubenswrapper[4861]: I1003 14:53:49.858559 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-7b56bc9586-w989t_fc6983c6-4e21-49b6-a48a-f062bb5afd49/barbican-keystone-listener-log/0.log"
Oct 03 14:53:50 crc kubenswrapper[4861]: I1003 14:53:50.073195 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-79b8bfc47f-5jchw_ba4d9d03-a7d0-46ed-8429-008882213b57/barbican-worker/0.log"
Oct 03 14:53:50 crc kubenswrapper[4861]: I1003 14:53:50.087799 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-79b8bfc47f-5jchw_ba4d9d03-a7d0-46ed-8429-008882213b57/barbican-worker-log/0.log"
Oct 03 14:53:50 crc kubenswrapper[4861]: I1003 14:53:50.424822 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-8zh9n_88cfdf25-6cf6-4553-a95b-d49e13d2f509/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log"
Oct 03 14:53:50 crc kubenswrapper[4861]: I1003 14:53:50.666528 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_14629b29-a838-444c-9acf-42c14b7dbe5a/ceilometer-central-agent/0.log"
Oct 03 14:53:50 crc kubenswrapper[4861]: I1003 14:53:50.770886 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_14629b29-a838-444c-9acf-42c14b7dbe5a/ceilometer-notification-agent/0.log"
Oct 03 14:53:50 crc kubenswrapper[4861]: I1003 14:53:50.821165 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_14629b29-a838-444c-9acf-42c14b7dbe5a/proxy-httpd/0.log"
Oct 03 14:53:50 crc kubenswrapper[4861]: I1003 14:53:50.970963 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_14629b29-a838-444c-9acf-42c14b7dbe5a/sg-core/0.log"
Oct 03 14:53:51 crc kubenswrapper[4861]: I1003 14:53:51.138617 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_de8f27cb-a40f-4ab0-b709-4abad3ff72bb/cinder-api/0.log"
Oct 03 14:53:51 crc kubenswrapper[4861]: I1003 14:53:51.283837 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_de8f27cb-a40f-4ab0-b709-4abad3ff72bb/cinder-api-log/0.log"
Oct 03 14:53:51 crc kubenswrapper[4861]: I1003 14:53:51.376939 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_8af097a4-c83f-4687-9804-fded6b1eb9ac/cinder-scheduler/0.log"
Oct 03 14:53:51 crc kubenswrapper[4861]: I1003 14:53:51.602572 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-2p8f9_15a74413-2b7d-42e1-9b05-e50d739dfd39/configure-network-edpm-deployment-openstack-edpm-ipam/0.log"
Oct 03 14:53:51 crc kubenswrapper[4861]: I1003 14:53:51.620530 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_8af097a4-c83f-4687-9804-fded6b1eb9ac/probe/0.log"
Oct 03 14:53:51 crc kubenswrapper[4861]: I1003 14:53:51.889332 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-528b8_2b2349a3-d6ca-4e6f-a564-03dac17e4746/configure-os-edpm-deployment-openstack-edpm-ipam/0.log"
Oct 03 14:53:52 crc kubenswrapper[4861]: I1003 14:53:52.486822 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-smz4r_f353c9e7-1d8f-4084-b475-0c725858f034/configure-os-edpm-deployment-openstack-edpm-ipam/0.log"
Oct 03 14:53:52 crc kubenswrapper[4861]: I1003 14:53:52.652548 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-79dc84bdb7-xjmx4_f6f37303-3f4a-44b7-aef2-ed92a6c277e2/init/0.log"
Oct 03 14:53:52 crc kubenswrapper[4861]: I1003 14:53:52.797036 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-79dc84bdb7-xjmx4_f6f37303-3f4a-44b7-aef2-ed92a6c277e2/init/0.log"
Oct 03 14:53:52 crc kubenswrapper[4861]: I1003 14:53:52.944211 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-79dc84bdb7-xjmx4_f6f37303-3f4a-44b7-aef2-ed92a6c277e2/dnsmasq-dns/0.log"
Oct 03 14:53:53 crc kubenswrapper[4861]: I1003 14:53:53.120349 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_download-cache-edpm-deployment-openstack-edpm-ipam-8tkzn_e5903a2f-8943-4fab-8ddf-6ec1b8329590/download-cache-edpm-deployment-openstack-edpm-ipam/0.log"
Oct 03 14:53:53 crc kubenswrapper[4861]: I1003 14:53:53.231596 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_3f0d752d-7682-4244-9682-bf78e9a9d8ec/glance-httpd/0.log"
Oct 03 14:53:53 crc kubenswrapper[4861]: I1003 14:53:53.295960 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_3f0d752d-7682-4244-9682-bf78e9a9d8ec/glance-log/0.log"
Oct 03 14:53:54 crc kubenswrapper[4861]: I1003 14:53:54.214956 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_a2693b41-01f0-48e9-b551-fa6c48d29531/glance-httpd/0.log"
Oct 03 14:53:54 crc kubenswrapper[4861]: I1003 14:53:54.301808 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_a2693b41-01f0-48e9-b551-fa6c48d29531/glance-log/0.log"
Oct 03 14:53:54 crc kubenswrapper[4861]: I1003 14:53:54.598410 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-6c8cb9d9fb-bt6ls_81ec621b-cc30-4ab2-ae0e-bdd71629009f/horizon/0.log"
Oct 03 14:53:54 crc kubenswrapper[4861]: I1003 14:53:54.642580 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-6c8cb9d9fb-bt6ls_81ec621b-cc30-4ab2-ae0e-bdd71629009f/horizon/1.log"
Oct 03 14:53:54 crc kubenswrapper[4861]: I1003 14:53:54.943146 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-69f6f_e23c8e2c-00d8-43d0-a10a-5f7fef662315/install-certs-edpm-deployment-openstack-edpm-ipam/0.log"
Oct 03 14:53:55 crc kubenswrapper[4861]: I1003 14:53:55.106388 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-6c8cb9d9fb-bt6ls_81ec621b-cc30-4ab2-ae0e-bdd71629009f/horizon-log/0.log"
Oct 03 14:53:55 crc kubenswrapper[4861]: I1003 14:53:55.148183 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-hk68t_fd61a6d8-4264-4d03-8891-c1bdf462fa7b/install-os-edpm-deployment-openstack-edpm-ipam/0.log"
Oct 03 14:53:55 crc kubenswrapper[4861]: I1003 14:53:55.293803 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29325001-vhzr2_eb217f97-9f13-4711-b3ab-f449bdc34bae/keystone-cron/0.log"
Oct 03 14:53:55 crc kubenswrapper[4861]: I1003 14:53:55.619439 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_ccb19566-aa49-4551-9fbf-a05affdd60e2/kube-state-metrics/0.log"
Oct 03 14:53:56 crc kubenswrapper[4861]: I1003 14:53:56.011397 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-6z5r9_fd4c1aae-4d8b-49be-ad63-d6531b244f73/libvirt-edpm-deployment-openstack-edpm-ipam/0.log"
Oct 03 14:53:56 crc kubenswrapper[4861]: I1003 14:53:56.018213 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-d84968f5f-dw9pq_fbeb45d8-15f9-47b5-b6af-f578362eda62/keystone-api/0.log"
Oct 03 14:53:56 crc kubenswrapper[4861]: I1003 14:53:56.858715 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-55bc9b5c77-pkdzr_b539b449-5e16-4bb4-8931-ba6c9ad1df7d/neutron-httpd/0.log"
Oct 03 14:53:56 crc kubenswrapper[4861]: I1003 14:53:56.989934 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-55bc9b5c77-pkdzr_b539b449-5e16-4bb4-8931-ba6c9ad1df7d/neutron-api/0.log"
Oct 03 14:53:57 crc kubenswrapper[4861]: I1003 14:53:57.030161 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-8zld2_5a870822-9c29-4acb-b63c-2ff86a95a9fc/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log"
Oct 03 14:53:57 crc kubenswrapper[4861]: I1003 14:53:57.153563 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_7464ed9c-8f08-4c7a-8ca3-8a57734bd31f/memcached/0.log"
Oct 03 14:53:57 crc kubenswrapper[4861]: I1003 14:53:57.839091 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_04bb7c95-25e1-49b2-b659-2af6e5354749/nova-cell0-conductor-conductor/0.log"
Oct 03 14:53:58 crc kubenswrapper[4861]: I1003 14:53:58.278821 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_90ae374e-4f8e-4d9c-84e8-00a5c571fd98/nova-cell1-conductor-conductor/0.log"
Oct 03 14:53:58 crc kubenswrapper[4861]: I1003 14:53:58.296581 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_9f7bb8d1-2c92-4ce3-b510-386e42fab1ac/nova-api-log/0.log"
Oct 03 14:53:58 crc kubenswrapper[4861]: I1003 14:53:58.471790 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_9f7bb8d1-2c92-4ce3-b510-386e42fab1ac/nova-api-api/0.log"
Oct 03 14:53:58 crc kubenswrapper[4861]: I1003 14:53:58.597404 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_9b155fef-8eef-48f9-a6fe-b76d46ddadb0/nova-cell1-novncproxy-novncproxy/0.log"
Oct 03 14:53:58 crc kubenswrapper[4861]: I1003 14:53:58.755970 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-edpm-deployment-openstack-edpm-ipam-mlf7m_bc64fc8f-fe84-485b-8d52-a4e26a00435a/nova-edpm-deployment-openstack-edpm-ipam/0.log"
Oct 03 14:53:58 crc kubenswrapper[4861]: I1003 14:53:58.908814 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_92433e8c-4d6d-4a9d-a492-192863eed46c/nova-metadata-log/0.log"
Oct 03 14:53:59 crc kubenswrapper[4861]: I1003 14:53:59.430943 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_cc0949fe-630b-4f65-9c6c-7a87272586a2/mysql-bootstrap/0.log"
Oct 03 14:53:59 crc kubenswrapper[4861]: I1003 14:53:59.698887 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_cc0949fe-630b-4f65-9c6c-7a87272586a2/galera/0.log"
Oct 03 14:53:59 crc kubenswrapper[4861]: I1003 14:53:59.700099 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_cc0949fe-630b-4f65-9c6c-7a87272586a2/mysql-bootstrap/0.log"
Oct 03 14:53:59 crc kubenswrapper[4861]: I1003 14:53:59.701384 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_c364afdb-ff79-4011-abeb-243d45ea7b95/nova-scheduler-scheduler/0.log"
Oct 03 14:54:00 crc kubenswrapper[4861]: I1003 14:54:00.090933 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_e729e54c-b7bc-46e8-94c8-ef5d8a4f42b1/mysql-bootstrap/0.log"
Oct 03 14:54:00 crc kubenswrapper[4861]: I1003 14:54:00.146689 4861 patch_prober.go:28] interesting pod/machine-config-daemon-t9slw container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 03 14:54:00 crc kubenswrapper[4861]: I1003 14:54:00.146746 4861 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 03 14:54:00 crc kubenswrapper[4861]: I1003 14:54:00.146789 4861 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-t9slw"
Oct 03 14:54:00 crc kubenswrapper[4861]: I1003 14:54:00.147604 4861 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"b2d449adc90f7d0841e7f09d27f801ef238eaa66c11a79a42bb54ccb6df2a132"} pod="openshift-machine-config-operator/machine-config-daemon-t9slw" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Oct 03 14:54:00 crc kubenswrapper[4861]: I1003 14:54:00.147657 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" containerName="machine-config-daemon" containerID="cri-o://b2d449adc90f7d0841e7f09d27f801ef238eaa66c11a79a42bb54ccb6df2a132" gracePeriod=600
Oct 03 14:54:00 crc kubenswrapper[4861]: E1003 14:54:00.279766 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348"
Oct 03 14:54:00 crc kubenswrapper[4861]: I1003 14:54:00.295371 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_92433e8c-4d6d-4a9d-a492-192863eed46c/nova-metadata-metadata/0.log"
Oct 03 14:54:00 crc kubenswrapper[4861]: I1003 14:54:00.518836 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_e729e54c-b7bc-46e8-94c8-ef5d8a4f42b1/mysql-bootstrap/0.log"
Oct 03 14:54:00 crc kubenswrapper[4861]: I1003 14:54:00.538569 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_e729e54c-b7bc-46e8-94c8-ef5d8a4f42b1/galera/0.log"
Oct 03 14:54:00 crc kubenswrapper[4861]: I1003 14:54:00.583337 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_08030cdc-19c1-46f6-940c-48d493ce3880/openstackclient/0.log"
Oct 03 14:54:00 crc kubenswrapper[4861]: I1003 14:54:00.732917 4861 generic.go:334] "Generic (PLEG): container finished" podID="d8335d3f-417e-4114-b306-a3d8f6c31348" containerID="b2d449adc90f7d0841e7f09d27f801ef238eaa66c11a79a42bb54ccb6df2a132" exitCode=0
Oct 03 14:54:00 crc kubenswrapper[4861]: I1003 14:54:00.733183 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" event={"ID":"d8335d3f-417e-4114-b306-a3d8f6c31348","Type":"ContainerDied","Data":"b2d449adc90f7d0841e7f09d27f801ef238eaa66c11a79a42bb54ccb6df2a132"}
Oct 03 14:54:00 crc kubenswrapper[4861]: I1003 14:54:00.733345 4861 scope.go:117] "RemoveContainer" containerID="c9fca1af93d570056d03200b887e5822727fbbd0c431d91df4b8a8541a504191"
Oct 03 14:54:00 crc kubenswrapper[4861]: I1003 14:54:00.734140 4861 scope.go:117] "RemoveContainer" containerID="b2d449adc90f7d0841e7f09d27f801ef238eaa66c11a79a42bb54ccb6df2a132"
Oct 03 14:54:00 crc kubenswrapper[4861]: E1003 14:54:00.734736 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348"
Oct 03 14:54:00 crc kubenswrapper[4861]: I1003 14:54:00.736199 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-xsmhv_0636b1a6-6f21-4d14-8a07-014a3e9395c7/openstack-network-exporter/0.log"
Oct 03 14:54:00 crc kubenswrapper[4861]: I1003 14:54:00.766332 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-5glhl/crc-debug-jrrnv" podStartSLOduration=67.298194455 podStartE2EDuration="1m44.766314293s" podCreationTimestamp="2025-10-03 14:52:16 +0000 UTC" firstStartedPulling="2025-10-03 14:52:16.87874321 +0000 UTC m=+4850.876728257" lastFinishedPulling="2025-10-03 14:52:54.346863048 +0000 UTC m=+4888.344848095" observedRunningTime="2025-10-03 14:52:55.155095198 +0000 UTC m=+4889.153080255" watchObservedRunningTime="2025-10-03 14:54:00.766314293 +0000 UTC m=+4954.764299340"
Oct 03 14:54:00 crc kubenswrapper[4861]: I1003 14:54:00.922613 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-j8c4m_72bbb704-bf32-46a9-9540-32c2e385f8ab/ovsdb-server-init/0.log"
Oct 03 14:54:01 crc kubenswrapper[4861]: I1003 14:54:01.153171 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-j8c4m_72bbb704-bf32-46a9-9540-32c2e385f8ab/ovsdb-server-init/0.log"
Oct 03 14:54:01 crc kubenswrapper[4861]: I1003 14:54:01.235283 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-j8c4m_72bbb704-bf32-46a9-9540-32c2e385f8ab/ovsdb-server/0.log"
Oct 03 14:54:01 crc kubenswrapper[4861]: I1003 14:54:01.270688 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-j8c4m_72bbb704-bf32-46a9-9540-32c2e385f8ab/ovs-vswitchd/0.log"
Oct 03 14:54:01 crc kubenswrapper[4861]: I1003 14:54:01.499803 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-7c2gq_defed5cd-53ca-4e60-af05-a4c425abbf60/ovn-edpm-deployment-openstack-edpm-ipam/0.log"
Oct 03 14:54:01 crc kubenswrapper[4861]: I1003 14:54:01.523557 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-x5wkc_ef2b767c-ef6a-4364-a3f0-14b68bee3986/ovn-controller/0.log"
Oct 03 14:54:02 crc kubenswrapper[4861]: I1003 14:54:02.008545 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_b310d13d-da67-4151-948d-36a86d413a27/openstack-network-exporter/0.log"
Oct 03 14:54:02 crc kubenswrapper[4861]: I1003 14:54:02.109537 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_b310d13d-da67-4151-948d-36a86d413a27/ovn-northd/0.log"
Oct 03 14:54:02 crc kubenswrapper[4861]: I1003 14:54:02.184333 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_6714f489-ceb2-4b99-a61d-fe45289bed5f/openstack-network-exporter/0.log"
Oct 03 14:54:02 crc kubenswrapper[4861]: I1003 14:54:02.248145 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_6714f489-ceb2-4b99-a61d-fe45289bed5f/ovsdbserver-nb/0.log"
Oct 03 14:54:02 crc kubenswrapper[4861]: I1003 14:54:02.365901 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_a361bfa1-97ef-4ee2-bcfe-3763898cbc32/openstack-network-exporter/0.log"
Oct 03 14:54:02 crc kubenswrapper[4861]: I1003 14:54:02.420480 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_a361bfa1-97ef-4ee2-bcfe-3763898cbc32/ovsdbserver-sb/0.log"
Oct 03 14:54:02 crc kubenswrapper[4861]: I1003 14:54:02.873470 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-c6f6cb4f6-xc49q_37f329ec-eb69-4d87-a22b-ace765cef57f/placement-api/0.log"
Oct 03 14:54:02 crc kubenswrapper[4861]: I1003 14:54:02.907025 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_b222a9ce-46d6-4caf-b76d-f6b773276cb1/setup-container/0.log"
Oct 03 14:54:02 crc kubenswrapper[4861]: I1003 14:54:02.919587 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-c6f6cb4f6-xc49q_37f329ec-eb69-4d87-a22b-ace765cef57f/placement-log/0.log"
Oct 03 14:54:03 crc kubenswrapper[4861]: I1003 14:54:03.197138 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_b222a9ce-46d6-4caf-b76d-f6b773276cb1/rabbitmq/0.log"
Oct 03 14:54:03 crc kubenswrapper[4861]: I1003 14:54:03.220408 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_0494d758-5f63-45c6-930c-f34b43484fd9/setup-container/0.log"
Oct 03 14:54:03 crc kubenswrapper[4861]: I1003 14:54:03.274718 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_b222a9ce-46d6-4caf-b76d-f6b773276cb1/setup-container/0.log"
Oct 03 14:54:03 crc kubenswrapper[4861]: I1003 14:54:03.567105 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_0494d758-5f63-45c6-930c-f34b43484fd9/rabbitmq/0.log"
Oct 03 14:54:03 crc kubenswrapper[4861]: I1003 14:54:03.601830 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_0494d758-5f63-45c6-930c-f34b43484fd9/setup-container/0.log"
Oct 03 14:54:03 crc kubenswrapper[4861]: I1003 14:54:03.652586 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-g587s_b68af2ce-5dae-47da-801b-a2ad6a6b8db1/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log"
Oct 03 14:54:04 crc kubenswrapper[4861]: I1003 14:54:04.743895 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_redhat-edpm-deployment-openstack-edpm-ipam-js7xl_a5b6d421-13d1-4c5b-b244-087790b16c8b/redhat-edpm-deployment-openstack-edpm-ipam/0.log"
Oct 03 14:54:04 crc kubenswrapper[4861]: I1003 14:54:04.845113 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-jjs67_bb1b5aa4-44c6-475c-8995-ac100260ce29/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log"
Oct 03 14:54:05 crc kubenswrapper[4861]: I1003 14:54:05.064269 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-996sx_6877d996-5390-4bac-8c57-cd3f25a65554/run-os-edpm-deployment-openstack-edpm-ipam/0.log"
Oct 03 14:54:05 crc kubenswrapper[4861]: I1003 14:54:05.253307 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-7qzsr_e8a2edd4-057a-4eca-948e-5c3eeb2a0550/ssh-known-hosts-edpm-deployment/0.log"
Oct 03 14:54:05 crc kubenswrapper[4861]: I1003 14:54:05.460886 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-5df8ff69f5-s22b2_71387314-2734-4ddf-ba27-c27c5cc05b0b/proxy-httpd/0.log"
Oct 03 14:54:05 crc kubenswrapper[4861]: I1003 14:54:05.724829 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-5df8ff69f5-s22b2_71387314-2734-4ddf-ba27-c27c5cc05b0b/proxy-server/0.log"
Oct 03 14:54:06 crc kubenswrapper[4861]: I1003 14:54:06.363250 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_532954b7-a9d5-4ddb-87af-b17408a5db8b/account-reaper/0.log"
Oct 03 14:54:06 crc kubenswrapper[4861]: I1003 14:54:06.364485 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-ring-rebalance-s5lt7_971f0946-1828-4512-9b7d-2bafc5a78ef3/swift-ring-rebalance/0.log"
Oct 03 14:54:06 crc kubenswrapper[4861]: I1003 14:54:06.365328 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_532954b7-a9d5-4ddb-87af-b17408a5db8b/account-auditor/0.log"
Oct 03 14:54:06 crc kubenswrapper[4861]: I1003 14:54:06.650946 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_532954b7-a9d5-4ddb-87af-b17408a5db8b/container-auditor/0.log"
Oct 03 14:54:06 crc kubenswrapper[4861]: I1003 14:54:06.741608 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_532954b7-a9d5-4ddb-87af-b17408a5db8b/container-replicator/0.log"
Oct 03 14:54:06 crc kubenswrapper[4861]: I1003 14:54:06.748545 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_532954b7-a9d5-4ddb-87af-b17408a5db8b/account-replicator/0.log"
Oct 03 14:54:06 crc kubenswrapper[4861]: I1003 14:54:06.764885 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_532954b7-a9d5-4ddb-87af-b17408a5db8b/account-server/0.log"
Oct 03 14:54:06 crc kubenswrapper[4861]: I1003 14:54:06.985835 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_532954b7-a9d5-4ddb-87af-b17408a5db8b/container-server/0.log"
Oct 03 14:54:07 crc kubenswrapper[4861]: I1003 14:54:07.029813 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_532954b7-a9d5-4ddb-87af-b17408a5db8b/object-expirer/0.log"
Oct 03 14:54:07 crc kubenswrapper[4861]: I1003 14:54:07.055385 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_532954b7-a9d5-4ddb-87af-b17408a5db8b/object-auditor/0.log"
Oct 03 14:54:07 crc kubenswrapper[4861]: I1003 14:54:07.094142 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_532954b7-a9d5-4ddb-87af-b17408a5db8b/container-updater/0.log"
Oct 03 14:54:07 crc kubenswrapper[4861]: I1003 14:54:07.248436 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_532954b7-a9d5-4ddb-87af-b17408a5db8b/object-replicator/0.log"
Oct 03 14:54:07 crc kubenswrapper[4861]: I1003 14:54:07.284803 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_532954b7-a9d5-4ddb-87af-b17408a5db8b/object-updater/0.log"
Oct 03 14:54:07 crc kubenswrapper[4861]: I1003 14:54:07.346272 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_532954b7-a9d5-4ddb-87af-b17408a5db8b/object-server/0.log"
Oct 03 14:54:07 crc kubenswrapper[4861]: I1003 14:54:07.369039 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_532954b7-a9d5-4ddb-87af-b17408a5db8b/rsync/0.log"
Oct 03 14:54:07 crc kubenswrapper[4861]: I1003 14:54:07.472714 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_532954b7-a9d5-4ddb-87af-b17408a5db8b/swift-recon-cron/0.log"
Oct 03 14:54:07 crc kubenswrapper[4861]: I1003 14:54:07.615309 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-edpm-deployment-openstack-edpm-ipam-7pxf6_3f63691e-8f0c-4494-a774-46fe7aaba3c9/telemetry-edpm-deployment-openstack-edpm-ipam/0.log"
Oct 03 14:54:07 crc kubenswrapper[4861]: I1003 14:54:07.751385 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tempest-tests-tempest_a0580466-6d38-4ad0-a84e-dcf312f06369/tempest-tests-tempest-tests-runner/0.log"
Oct 03 14:54:07 crc kubenswrapper[4861]: I1003 14:54:07.925040 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_test-operator-logs-pod-tempest-tempest-tests-tempest_9b4f3282-6471-451a-95d3-5a1803ae69ae/test-operator-logs-container/0.log"
Oct 03 14:54:08 crc kubenswrapper[4861]: I1003 14:54:08.187603 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-jk5jz_c87ad9ae-123b-4fb0-a1cc-2cc0ffd85ad3/validate-network-edpm-deployment-openstack-edpm-ipam/0.log"
Oct 03 14:54:11 crc kubenswrapper[4861]: I1003 14:54:11.681740 4861 scope.go:117] "RemoveContainer" containerID="b2d449adc90f7d0841e7f09d27f801ef238eaa66c11a79a42bb54ccb6df2a132"
Oct 03 14:54:11 crc kubenswrapper[4861]: E1003 14:54:11.682407 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348"
Oct 03 14:54:22 crc kubenswrapper[4861]: I1003 14:54:22.681295 4861 scope.go:117] "RemoveContainer" containerID="b2d449adc90f7d0841e7f09d27f801ef238eaa66c11a79a42bb54ccb6df2a132"
Oct 03 14:54:22 crc kubenswrapper[4861]: E1003 14:54:22.682896 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348"
Oct 03 14:54:37 crc kubenswrapper[4861]: I1003 14:54:37.681641 4861 scope.go:117] "RemoveContainer" containerID="b2d449adc90f7d0841e7f09d27f801ef238eaa66c11a79a42bb54ccb6df2a132"
Oct 03 14:54:37 crc kubenswrapper[4861]: E1003 14:54:37.682423 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348"
Oct 03 14:54:50 crc kubenswrapper[4861]: I1003 14:54:50.681668 4861 scope.go:117] "RemoveContainer" containerID="b2d449adc90f7d0841e7f09d27f801ef238eaa66c11a79a42bb54ccb6df2a132"
Oct 03 14:54:50 crc kubenswrapper[4861]: E1003 14:54:50.682771 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348"
Oct 03 14:55:04 crc kubenswrapper[4861]: I1003 14:55:04.681930 4861 scope.go:117] "RemoveContainer" containerID="b2d449adc90f7d0841e7f09d27f801ef238eaa66c11a79a42bb54ccb6df2a132"
Oct 03 14:55:04 crc kubenswrapper[4861]: E1003 14:55:04.682873 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348"
Oct 03 14:55:12 crc kubenswrapper[4861]: I1003 14:55:12.435942 4861 generic.go:334] "Generic (PLEG): container finished" podID="0f8ffc26-6a81-439d-b527-587280137d65" containerID="1aaf47a529585f987156745c94e49df181436cc02df40537792c398f16030b3f" exitCode=0
Oct 03 14:55:12 crc kubenswrapper[4861]: I1003 14:55:12.435999 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-5glhl/crc-debug-jrrnv" event={"ID":"0f8ffc26-6a81-439d-b527-587280137d65","Type":"ContainerDied","Data":"1aaf47a529585f987156745c94e49df181436cc02df40537792c398f16030b3f"}
Oct 03 14:55:13 crc kubenswrapper[4861]: I1003 14:55:13.544094 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-5glhl/crc-debug-jrrnv"
Oct 03 14:55:13 crc kubenswrapper[4861]: I1003 14:55:13.597300 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-5glhl/crc-debug-jrrnv"]
Oct 03 14:55:13 crc kubenswrapper[4861]: I1003 14:55:13.612619 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-5glhl/crc-debug-jrrnv"]
Oct 03 14:55:13 crc kubenswrapper[4861]: I1003 14:55:13.741118 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ss2kj\" (UniqueName: \"kubernetes.io/projected/0f8ffc26-6a81-439d-b527-587280137d65-kube-api-access-ss2kj\") pod \"0f8ffc26-6a81-439d-b527-587280137d65\" (UID: \"0f8ffc26-6a81-439d-b527-587280137d65\") "
Oct 03 14:55:13 crc kubenswrapper[4861]: I1003 14:55:13.742390 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0f8ffc26-6a81-439d-b527-587280137d65-host\") pod \"0f8ffc26-6a81-439d-b527-587280137d65\" (UID: \"0f8ffc26-6a81-439d-b527-587280137d65\") "
Oct 03 14:55:13 crc kubenswrapper[4861]: I1003 14:55:13.742490 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0f8ffc26-6a81-439d-b527-587280137d65-host" (OuterVolumeSpecName: "host") pod "0f8ffc26-6a81-439d-b527-587280137d65" (UID: "0f8ffc26-6a81-439d-b527-587280137d65"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 03 14:55:13 crc kubenswrapper[4861]: I1003 14:55:13.746085 4861 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0f8ffc26-6a81-439d-b527-587280137d65-host\") on node \"crc\" DevicePath \"\""
Oct 03 14:55:13 crc kubenswrapper[4861]: I1003 14:55:13.756409 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0f8ffc26-6a81-439d-b527-587280137d65-kube-api-access-ss2kj" (OuterVolumeSpecName: "kube-api-access-ss2kj") pod "0f8ffc26-6a81-439d-b527-587280137d65" (UID: "0f8ffc26-6a81-439d-b527-587280137d65"). InnerVolumeSpecName "kube-api-access-ss2kj". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 14:55:13 crc kubenswrapper[4861]: I1003 14:55:13.848658 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ss2kj\" (UniqueName: \"kubernetes.io/projected/0f8ffc26-6a81-439d-b527-587280137d65-kube-api-access-ss2kj\") on node \"crc\" DevicePath \"\""
Oct 03 14:55:14 crc kubenswrapper[4861]: I1003 14:55:14.458954 4861 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b20fb79a0f988fcbcf80836c14bf562fd3ccf7951b0fbb818faf3b6656ff0e34"
Oct 03 14:55:14 crc kubenswrapper[4861]: I1003 14:55:14.459068 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-5glhl/crc-debug-jrrnv"
Oct 03 14:55:14 crc kubenswrapper[4861]: I1003 14:55:14.697870 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0f8ffc26-6a81-439d-b527-587280137d65" path="/var/lib/kubelet/pods/0f8ffc26-6a81-439d-b527-587280137d65/volumes"
Oct 03 14:55:14 crc kubenswrapper[4861]: I1003 14:55:14.776278 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-5glhl/crc-debug-jvc5k"]
Oct 03 14:55:14 crc kubenswrapper[4861]: E1003 14:55:14.777040 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0f8ffc26-6a81-439d-b527-587280137d65" containerName="container-00"
Oct 03 14:55:14 crc kubenswrapper[4861]: I1003 14:55:14.777061 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="0f8ffc26-6a81-439d-b527-587280137d65" containerName="container-00"
Oct 03 14:55:14 crc kubenswrapper[4861]: I1003 14:55:14.777377 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="0f8ffc26-6a81-439d-b527-587280137d65" containerName="container-00"
Oct 03 14:55:14 crc kubenswrapper[4861]: I1003 14:55:14.778128 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-5glhl/crc-debug-jvc5k"
Oct 03 14:55:14 crc kubenswrapper[4861]: I1003 14:55:14.867175 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sm7b4\" (UniqueName: \"kubernetes.io/projected/04f8845d-532e-442d-8180-042c5629919a-kube-api-access-sm7b4\") pod \"crc-debug-jvc5k\" (UID: \"04f8845d-532e-442d-8180-042c5629919a\") " pod="openshift-must-gather-5glhl/crc-debug-jvc5k"
Oct 03 14:55:14 crc kubenswrapper[4861]: I1003 14:55:14.867291 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/04f8845d-532e-442d-8180-042c5629919a-host\") pod \"crc-debug-jvc5k\" (UID: \"04f8845d-532e-442d-8180-042c5629919a\") " pod="openshift-must-gather-5glhl/crc-debug-jvc5k"
Oct 03 14:55:14 crc kubenswrapper[4861]: I1003 14:55:14.971940 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sm7b4\" (UniqueName: \"kubernetes.io/projected/04f8845d-532e-442d-8180-042c5629919a-kube-api-access-sm7b4\") pod \"crc-debug-jvc5k\" (UID: \"04f8845d-532e-442d-8180-042c5629919a\") " pod="openshift-must-gather-5glhl/crc-debug-jvc5k"
Oct 03 14:55:14 crc kubenswrapper[4861]: I1003 14:55:14.972009 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/04f8845d-532e-442d-8180-042c5629919a-host\") pod \"crc-debug-jvc5k\" (UID: \"04f8845d-532e-442d-8180-042c5629919a\") " pod="openshift-must-gather-5glhl/crc-debug-jvc5k"
Oct 03 14:55:14 crc kubenswrapper[4861]: I1003 14:55:14.972256 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/04f8845d-532e-442d-8180-042c5629919a-host\") pod \"crc-debug-jvc5k\" (UID: \"04f8845d-532e-442d-8180-042c5629919a\") " pod="openshift-must-gather-5glhl/crc-debug-jvc5k"
Oct 03 14:55:14 crc kubenswrapper[4861]: I1003 14:55:14.992008 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sm7b4\" (UniqueName: \"kubernetes.io/projected/04f8845d-532e-442d-8180-042c5629919a-kube-api-access-sm7b4\") pod \"crc-debug-jvc5k\" (UID: \"04f8845d-532e-442d-8180-042c5629919a\") " pod="openshift-must-gather-5glhl/crc-debug-jvc5k"
Oct 03 14:55:15 crc kubenswrapper[4861]: I1003 14:55:15.099180 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-5glhl/crc-debug-jvc5k"
Oct 03 14:55:15 crc kubenswrapper[4861]: I1003 14:55:15.468015 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-5glhl/crc-debug-jvc5k" event={"ID":"04f8845d-532e-442d-8180-042c5629919a","Type":"ContainerStarted","Data":"fa8f98f5220911023149a79a7816b7eeadcac429f8627ed52354f1b5a47d935d"}
Oct 03 14:55:15 crc kubenswrapper[4861]: I1003 14:55:15.468329 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-5glhl/crc-debug-jvc5k" event={"ID":"04f8845d-532e-442d-8180-042c5629919a","Type":"ContainerStarted","Data":"092ff1c37470b4cb990542bfea79ce2f47bdc3df672fe58c2c0e8b4e9ed1a1b3"}
Oct 03 14:55:15 crc kubenswrapper[4861]: I1003 14:55:15.489603 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-5glhl/crc-debug-jvc5k" podStartSLOduration=1.48958549 podStartE2EDuration="1.48958549s" podCreationTimestamp="2025-10-03 14:55:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 14:55:15.479131941 +0000 UTC m=+5029.477116988" watchObservedRunningTime="2025-10-03 14:55:15.48958549 +0000 UTC m=+5029.487570547"
Oct 03 14:55:16 crc kubenswrapper[4861]: I1003 14:55:16.489544 4861 generic.go:334] "Generic (PLEG): container finished" podID="04f8845d-532e-442d-8180-042c5629919a" containerID="fa8f98f5220911023149a79a7816b7eeadcac429f8627ed52354f1b5a47d935d" exitCode=0
Oct 03 14:55:16 crc kubenswrapper[4861]: I1003 14:55:16.489617 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-5glhl/crc-debug-jvc5k" event={"ID":"04f8845d-532e-442d-8180-042c5629919a","Type":"ContainerDied","Data":"fa8f98f5220911023149a79a7816b7eeadcac429f8627ed52354f1b5a47d935d"}
Oct 03 14:55:17 crc kubenswrapper[4861]: I1003 14:55:17.599851 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-5glhl/crc-debug-jvc5k"
Oct 03 14:55:17 crc kubenswrapper[4861]: I1003 14:55:17.609159 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/04f8845d-532e-442d-8180-042c5629919a-host\") pod \"04f8845d-532e-442d-8180-042c5629919a\" (UID: \"04f8845d-532e-442d-8180-042c5629919a\") "
Oct 03 14:55:17 crc kubenswrapper[4861]: I1003 14:55:17.609421 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/04f8845d-532e-442d-8180-042c5629919a-host" (OuterVolumeSpecName: "host") pod "04f8845d-532e-442d-8180-042c5629919a" (UID: "04f8845d-532e-442d-8180-042c5629919a"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 03 14:55:17 crc kubenswrapper[4861]: I1003 14:55:17.609450 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sm7b4\" (UniqueName: \"kubernetes.io/projected/04f8845d-532e-442d-8180-042c5629919a-kube-api-access-sm7b4\") pod \"04f8845d-532e-442d-8180-042c5629919a\" (UID: \"04f8845d-532e-442d-8180-042c5629919a\") "
Oct 03 14:55:17 crc kubenswrapper[4861]: I1003 14:55:17.610080 4861 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/04f8845d-532e-442d-8180-042c5629919a-host\") on node \"crc\" DevicePath \"\""
Oct 03 14:55:17 crc kubenswrapper[4861]: I1003 14:55:17.618944 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/04f8845d-532e-442d-8180-042c5629919a-kube-api-access-sm7b4" (OuterVolumeSpecName: "kube-api-access-sm7b4") pod "04f8845d-532e-442d-8180-042c5629919a" (UID: "04f8845d-532e-442d-8180-042c5629919a"). InnerVolumeSpecName "kube-api-access-sm7b4". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 14:55:17 crc kubenswrapper[4861]: I1003 14:55:17.682089 4861 scope.go:117] "RemoveContainer" containerID="b2d449adc90f7d0841e7f09d27f801ef238eaa66c11a79a42bb54ccb6df2a132"
Oct 03 14:55:17 crc kubenswrapper[4861]: E1003 14:55:17.682299 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348"
Oct 03 14:55:17 crc kubenswrapper[4861]: I1003 14:55:17.710911 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sm7b4\" (UniqueName: \"kubernetes.io/projected/04f8845d-532e-442d-8180-042c5629919a-kube-api-access-sm7b4\") on node \"crc\" DevicePath \"\""
Oct 03 14:55:18 crc kubenswrapper[4861]: I1003 14:55:18.506515 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-5glhl/crc-debug-jvc5k" event={"ID":"04f8845d-532e-442d-8180-042c5629919a","Type":"ContainerDied","Data":"092ff1c37470b4cb990542bfea79ce2f47bdc3df672fe58c2c0e8b4e9ed1a1b3"}
Oct 03 14:55:18 crc kubenswrapper[4861]: I1003 14:55:18.506746 4861 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="092ff1c37470b4cb990542bfea79ce2f47bdc3df672fe58c2c0e8b4e9ed1a1b3"
Oct 03 14:55:18 crc kubenswrapper[4861]: I1003 14:55:18.506606 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-5glhl/crc-debug-jvc5k"
Oct 03 14:55:23 crc kubenswrapper[4861]: I1003 14:55:23.570191 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-5glhl/crc-debug-jvc5k"]
Oct 03 14:55:23 crc kubenswrapper[4861]: I1003 14:55:23.580817 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-5glhl/crc-debug-jvc5k"]
Oct 03 14:55:24 crc kubenswrapper[4861]: I1003 14:55:24.702676 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="04f8845d-532e-442d-8180-042c5629919a" path="/var/lib/kubelet/pods/04f8845d-532e-442d-8180-042c5629919a/volumes"
Oct 03 14:55:24 crc kubenswrapper[4861]: I1003 14:55:24.792973 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-5glhl/crc-debug-qwck9"]
Oct 03 14:55:24 crc kubenswrapper[4861]: E1003 14:55:24.793590 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="04f8845d-532e-442d-8180-042c5629919a" containerName="container-00"
Oct 03 14:55:24 crc kubenswrapper[4861]: I1003 14:55:24.793622 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="04f8845d-532e-442d-8180-042c5629919a" containerName="container-00"
Oct 03 14:55:24 crc kubenswrapper[4861]: I1003 14:55:24.793991 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="04f8845d-532e-442d-8180-042c5629919a" containerName="container-00"
Oct 03 14:55:24 crc kubenswrapper[4861]: I1003 14:55:24.796106 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-5glhl/crc-debug-qwck9"
Oct 03 14:55:24 crc kubenswrapper[4861]: I1003 14:55:24.920615 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gndfk\" (UniqueName: \"kubernetes.io/projected/d1736936-d30e-4b4c-9aa3-723b7c43d73e-kube-api-access-gndfk\") pod \"crc-debug-qwck9\" (UID: \"d1736936-d30e-4b4c-9aa3-723b7c43d73e\") " pod="openshift-must-gather-5glhl/crc-debug-qwck9"
Oct 03 14:55:24 crc kubenswrapper[4861]: I1003 14:55:24.920679 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/d1736936-d30e-4b4c-9aa3-723b7c43d73e-host\") pod \"crc-debug-qwck9\" (UID: \"d1736936-d30e-4b4c-9aa3-723b7c43d73e\") " pod="openshift-must-gather-5glhl/crc-debug-qwck9"
Oct 03 14:55:25 crc kubenswrapper[4861]: I1003 14:55:25.022168 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gndfk\" (UniqueName: \"kubernetes.io/projected/d1736936-d30e-4b4c-9aa3-723b7c43d73e-kube-api-access-gndfk\") pod \"crc-debug-qwck9\" (UID: \"d1736936-d30e-4b4c-9aa3-723b7c43d73e\") " pod="openshift-must-gather-5glhl/crc-debug-qwck9"
Oct 03 14:55:25 crc kubenswrapper[4861]: I1003 14:55:25.022214 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/d1736936-d30e-4b4c-9aa3-723b7c43d73e-host\") pod \"crc-debug-qwck9\" (UID: \"d1736936-d30e-4b4c-9aa3-723b7c43d73e\") " pod="openshift-must-gather-5glhl/crc-debug-qwck9"
Oct 03 14:55:25 crc kubenswrapper[4861]: I1003 14:55:25.022420 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/d1736936-d30e-4b4c-9aa3-723b7c43d73e-host\") pod \"crc-debug-qwck9\" (UID: \"d1736936-d30e-4b4c-9aa3-723b7c43d73e\") " pod="openshift-must-gather-5glhl/crc-debug-qwck9"
Oct 03 14:55:25 crc kubenswrapper[4861]: I1003 14:55:25.041353 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gndfk\" (UniqueName: \"kubernetes.io/projected/d1736936-d30e-4b4c-9aa3-723b7c43d73e-kube-api-access-gndfk\") pod \"crc-debug-qwck9\" (UID: \"d1736936-d30e-4b4c-9aa3-723b7c43d73e\") " pod="openshift-must-gather-5glhl/crc-debug-qwck9"
Oct 03 14:55:25 crc kubenswrapper[4861]: I1003 14:55:25.134771 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-5glhl/crc-debug-qwck9"
Oct 03 14:55:25 crc kubenswrapper[4861]: I1003 14:55:25.563891 4861 generic.go:334] "Generic (PLEG): container finished" podID="d1736936-d30e-4b4c-9aa3-723b7c43d73e" containerID="cfaea1fec272a4139f7066e63982e61bce7bdb44af81e9b2b0611dcd8ea11ba3" exitCode=0
Oct 03 14:55:25 crc kubenswrapper[4861]: I1003 14:55:25.563976 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-5glhl/crc-debug-qwck9" event={"ID":"d1736936-d30e-4b4c-9aa3-723b7c43d73e","Type":"ContainerDied","Data":"cfaea1fec272a4139f7066e63982e61bce7bdb44af81e9b2b0611dcd8ea11ba3"}
Oct 03 14:55:25 crc kubenswrapper[4861]: I1003 14:55:25.564360 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-5glhl/crc-debug-qwck9" event={"ID":"d1736936-d30e-4b4c-9aa3-723b7c43d73e","Type":"ContainerStarted","Data":"c2c11ed155a78cf20cad8e747ea8cfb5c34959337021d2caeace82e647f1e5e6"}
Oct 03 14:55:25 crc kubenswrapper[4861]: I1003 14:55:25.603701 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-5glhl/crc-debug-qwck9"]
Oct 03 14:55:25 crc kubenswrapper[4861]: I1003 14:55:25.612187 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-5glhl/crc-debug-qwck9"]
Oct 03 14:55:26 crc kubenswrapper[4861]: I1003 14:55:26.666559 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-5glhl/crc-debug-qwck9"
Oct 03 14:55:26 crc kubenswrapper[4861]: I1003 14:55:26.856918 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gndfk\" (UniqueName: \"kubernetes.io/projected/d1736936-d30e-4b4c-9aa3-723b7c43d73e-kube-api-access-gndfk\") pod \"d1736936-d30e-4b4c-9aa3-723b7c43d73e\" (UID: \"d1736936-d30e-4b4c-9aa3-723b7c43d73e\") "
Oct 03 14:55:26 crc kubenswrapper[4861]: I1003 14:55:26.857020 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/d1736936-d30e-4b4c-9aa3-723b7c43d73e-host\") pod \"d1736936-d30e-4b4c-9aa3-723b7c43d73e\" (UID: \"d1736936-d30e-4b4c-9aa3-723b7c43d73e\") "
Oct 03 14:55:26 crc kubenswrapper[4861]: I1003 14:55:26.857178 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d1736936-d30e-4b4c-9aa3-723b7c43d73e-host" (OuterVolumeSpecName: "host") pod "d1736936-d30e-4b4c-9aa3-723b7c43d73e" (UID: "d1736936-d30e-4b4c-9aa3-723b7c43d73e"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 03 14:55:26 crc kubenswrapper[4861]: I1003 14:55:26.857577 4861 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/d1736936-d30e-4b4c-9aa3-723b7c43d73e-host\") on node \"crc\" DevicePath \"\""
Oct 03 14:55:26 crc kubenswrapper[4861]: I1003 14:55:26.878584 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d1736936-d30e-4b4c-9aa3-723b7c43d73e-kube-api-access-gndfk" (OuterVolumeSpecName: "kube-api-access-gndfk") pod "d1736936-d30e-4b4c-9aa3-723b7c43d73e" (UID: "d1736936-d30e-4b4c-9aa3-723b7c43d73e"). InnerVolumeSpecName "kube-api-access-gndfk". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 14:55:26 crc kubenswrapper[4861]: I1003 14:55:26.959727 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gndfk\" (UniqueName: \"kubernetes.io/projected/d1736936-d30e-4b4c-9aa3-723b7c43d73e-kube-api-access-gndfk\") on node \"crc\" DevicePath \"\""
Oct 03 14:55:27 crc kubenswrapper[4861]: I1003 14:55:27.549613 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_4bc19491273fb05f10b8b1261ee07db0d4b5cae179a5dad352c99ff354t6br5_439d7722-c1ce-4a61-b781-499e9278b8d5/util/0.log"
Oct 03 14:55:27 crc kubenswrapper[4861]: I1003 14:55:27.582075 4861 scope.go:117] "RemoveContainer" containerID="cfaea1fec272a4139f7066e63982e61bce7bdb44af81e9b2b0611dcd8ea11ba3"
Oct 03 14:55:27 crc kubenswrapper[4861]: I1003 14:55:27.582104 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-5glhl/crc-debug-qwck9"
Oct 03 14:55:27 crc kubenswrapper[4861]: I1003 14:55:27.687167 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_4bc19491273fb05f10b8b1261ee07db0d4b5cae179a5dad352c99ff354t6br5_439d7722-c1ce-4a61-b781-499e9278b8d5/util/0.log"
Oct 03 14:55:27 crc kubenswrapper[4861]: I1003 14:55:27.766134 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_4bc19491273fb05f10b8b1261ee07db0d4b5cae179a5dad352c99ff354t6br5_439d7722-c1ce-4a61-b781-499e9278b8d5/pull/0.log"
Oct 03 14:55:27 crc kubenswrapper[4861]: I1003 14:55:27.784703 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_4bc19491273fb05f10b8b1261ee07db0d4b5cae179a5dad352c99ff354t6br5_439d7722-c1ce-4a61-b781-499e9278b8d5/pull/0.log"
Oct 03 14:55:27 crc kubenswrapper[4861]: I1003 14:55:27.931550 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_4bc19491273fb05f10b8b1261ee07db0d4b5cae179a5dad352c99ff354t6br5_439d7722-c1ce-4a61-b781-499e9278b8d5/pull/0.log"
Oct 03 14:55:27 crc kubenswrapper[4861]: I1003 14:55:27.947044 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_4bc19491273fb05f10b8b1261ee07db0d4b5cae179a5dad352c99ff354t6br5_439d7722-c1ce-4a61-b781-499e9278b8d5/util/0.log"
Oct 03 14:55:27 crc kubenswrapper[4861]: I1003 14:55:27.947329 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_4bc19491273fb05f10b8b1261ee07db0d4b5cae179a5dad352c99ff354t6br5_439d7722-c1ce-4a61-b781-499e9278b8d5/extract/0.log"
Oct 03 14:55:28 crc kubenswrapper[4861]: I1003 14:55:28.125066 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-6c675fb79f-vjdcg_b7d80f0f-8c96-446e-a31e-90913d19d661/kube-rbac-proxy/0.log"
Oct 03 14:55:28 crc kubenswrapper[4861]: I1003
14:55:28.170256 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-6c675fb79f-vjdcg_b7d80f0f-8c96-446e-a31e-90913d19d661/manager/0.log" Oct 03 14:55:28 crc kubenswrapper[4861]: I1003 14:55:28.258895 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-79d68d6c85-sfz28_a4bdc7e9-1988-4650-8f1c-2d5d8a71b4cc/kube-rbac-proxy/0.log" Oct 03 14:55:28 crc kubenswrapper[4861]: I1003 14:55:28.400247 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-79d68d6c85-sfz28_a4bdc7e9-1988-4650-8f1c-2d5d8a71b4cc/manager/0.log" Oct 03 14:55:28 crc kubenswrapper[4861]: I1003 14:55:28.432463 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-75dfd9b554-4nvdf_24be9f7b-3c61-4434-8863-b3b5d9e5ee2a/kube-rbac-proxy/0.log" Oct 03 14:55:28 crc kubenswrapper[4861]: I1003 14:55:28.475003 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-75dfd9b554-4nvdf_24be9f7b-3c61-4434-8863-b3b5d9e5ee2a/manager/0.log" Oct 03 14:55:28 crc kubenswrapper[4861]: I1003 14:55:28.631986 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-846dff85b5-qkqbk_5a3fecb8-5f79-4f05-9169-7d5cf9072f2c/kube-rbac-proxy/0.log" Oct 03 14:55:28 crc kubenswrapper[4861]: I1003 14:55:28.686755 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-846dff85b5-qkqbk_5a3fecb8-5f79-4f05-9169-7d5cf9072f2c/manager/0.log" Oct 03 14:55:28 crc kubenswrapper[4861]: I1003 14:55:28.691263 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d1736936-d30e-4b4c-9aa3-723b7c43d73e" path="/var/lib/kubelet/pods/d1736936-d30e-4b4c-9aa3-723b7c43d73e/volumes" Oct 03 14:55:28 crc kubenswrapper[4861]: I1003 14:55:28.863088 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-599898f689-ch9nw_20ad49e2-5077-43e2-8671-58457cf10432/kube-rbac-proxy/0.log" Oct 03 14:55:28 crc kubenswrapper[4861]: I1003 14:55:28.878568 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-599898f689-ch9nw_20ad49e2-5077-43e2-8671-58457cf10432/manager/0.log" Oct 03 14:55:28 crc kubenswrapper[4861]: I1003 14:55:28.962959 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-6769b867d9-qcfrn_c3e0bf46-a854-4e79-a1d4-4cb5d9c5eaf1/kube-rbac-proxy/0.log" Oct 03 14:55:29 crc kubenswrapper[4861]: I1003 14:55:29.069610 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-6769b867d9-qcfrn_c3e0bf46-a854-4e79-a1d4-4cb5d9c5eaf1/manager/0.log" Oct 03 14:55:29 crc kubenswrapper[4861]: I1003 14:55:29.139081 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-5fbf469cd7-8dttf_567dc82d-835f-4cf9-805d-a3d65c82b823/kube-rbac-proxy/0.log" Oct 03 14:55:29 crc kubenswrapper[4861]: I1003 14:55:29.301460 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-5fbf469cd7-8dttf_567dc82d-835f-4cf9-805d-a3d65c82b823/manager/0.log" Oct 03 14:55:29 crc 
kubenswrapper[4861]: I1003 14:55:29.362466 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-84bc9db6cc-mlqw7_cb92f20f-a3f0-42b3-ae87-11e0215c62fb/kube-rbac-proxy/0.log" Oct 03 14:55:29 crc kubenswrapper[4861]: I1003 14:55:29.406793 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-84bc9db6cc-mlqw7_cb92f20f-a3f0-42b3-ae87-11e0215c62fb/manager/0.log" Oct 03 14:55:29 crc kubenswrapper[4861]: I1003 14:55:29.597820 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7f55849f88-k4wkd_685fbda0-ab44-4f3c-8614-d87234d29d2f/kube-rbac-proxy/0.log" Oct 03 14:55:29 crc kubenswrapper[4861]: I1003 14:55:29.648264 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7f55849f88-k4wkd_685fbda0-ab44-4f3c-8614-d87234d29d2f/manager/0.log" Oct 03 14:55:29 crc kubenswrapper[4861]: I1003 14:55:29.799415 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-6fd6854b49-x5mwn_c7d483ab-e555-49c8-93c9-8bb99928605a/kube-rbac-proxy/0.log" Oct 03 14:55:29 crc kubenswrapper[4861]: I1003 14:55:29.821225 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-6fd6854b49-x5mwn_c7d483ab-e555-49c8-93c9-8bb99928605a/manager/0.log" Oct 03 14:55:29 crc kubenswrapper[4861]: I1003 14:55:29.920406 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-5c468bf4d4-rx55k_9bf321cf-e938-46ed-b8b9-01418f85de45/kube-rbac-proxy/0.log" Oct 03 14:55:29 crc kubenswrapper[4861]: I1003 14:55:29.990684 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-5c468bf4d4-rx55k_9bf321cf-e938-46ed-b8b9-01418f85de45/manager/0.log" Oct 03 14:55:30 crc kubenswrapper[4861]: I1003 14:55:30.087796 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-6574bf987d-6qwsn_57962592-42ae-47a9-a651-7c5d0e3ffad5/kube-rbac-proxy/0.log" Oct 03 14:55:30 crc kubenswrapper[4861]: I1003 14:55:30.185546 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-6574bf987d-6qwsn_57962592-42ae-47a9-a651-7c5d0e3ffad5/manager/0.log" Oct 03 14:55:30 crc kubenswrapper[4861]: I1003 14:55:30.312113 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-555c7456bd-v26h5_fdf89986-9a3f-4f07-b0ad-fb900a6e2fd8/kube-rbac-proxy/0.log" Oct 03 14:55:30 crc kubenswrapper[4861]: I1003 14:55:30.375871 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-555c7456bd-v26h5_fdf89986-9a3f-4f07-b0ad-fb900a6e2fd8/manager/0.log" Oct 03 14:55:30 crc kubenswrapper[4861]: I1003 14:55:30.461088 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-59d6cfdf45-qlrdc_16695342-b32b-4303-b248-d616d2ab9676/kube-rbac-proxy/0.log" Oct 03 14:55:30 crc kubenswrapper[4861]: I1003 14:55:30.564962 4861 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-59d6cfdf45-qlrdc_16695342-b32b-4303-b248-d616d2ab9676/manager/0.log" Oct 03 14:55:30 crc kubenswrapper[4861]: I1003 14:55:30.697683 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-6f64c4d678f4mvp_274f0db6-b0be-41da-a6ca-47160736f8e8/kube-rbac-proxy/0.log" Oct 03 14:55:30 crc kubenswrapper[4861]: I1003 14:55:30.739141 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-6f64c4d678f4mvp_274f0db6-b0be-41da-a6ca-47160736f8e8/manager/0.log" Oct 03 14:55:30 crc kubenswrapper[4861]: I1003 14:55:30.872217 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-8fd589ff7-d8tns_ea16cb5e-459c-4ad2-9579-17bd88783158/kube-rbac-proxy/0.log" Oct 03 14:55:31 crc kubenswrapper[4861]: I1003 14:55:31.194047 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-5b7969687c-wz6wm_49f1e7a1-ab0a-46b2-97c6-a069d913657d/kube-rbac-proxy/0.log" Oct 03 14:55:31 crc kubenswrapper[4861]: I1003 14:55:31.375445 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-5b7969687c-wz6wm_49f1e7a1-ab0a-46b2-97c6-a069d913657d/operator/0.log" Oct 03 14:55:31 crc kubenswrapper[4861]: I1003 14:55:31.416122 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-tkc8v_48467041-0fb2-4032-a831-11500776f212/registry-server/0.log" Oct 03 14:55:31 crc kubenswrapper[4861]: I1003 14:55:31.772337 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-688db7b6c7-xgq77_5f7b92ed-6113-4c12-a8ec-25589c15dd32/manager/0.log" Oct 03 14:55:31 crc kubenswrapper[4861]: I1003 14:55:31.775298 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-688db7b6c7-xgq77_5f7b92ed-6113-4c12-a8ec-25589c15dd32/kube-rbac-proxy/0.log" Oct 03 14:55:32 crc kubenswrapper[4861]: I1003 14:55:32.036750 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-7d8bb7f44c-5tm2n_92f8bc55-a8b1-41dd-9490-12c2280106ed/kube-rbac-proxy/0.log" Oct 03 14:55:32 crc kubenswrapper[4861]: I1003 14:55:32.045811 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-7d8bb7f44c-5tm2n_92f8bc55-a8b1-41dd-9490-12c2280106ed/manager/0.log" Oct 03 14:55:32 crc kubenswrapper[4861]: I1003 14:55:32.078763 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-8fd589ff7-d8tns_ea16cb5e-459c-4ad2-9579-17bd88783158/manager/0.log" Oct 03 14:55:32 crc kubenswrapper[4861]: I1003 14:55:32.087566 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-5f97d8c699-x75vq_68cf4faf-6f3d-4dfe-9a86-22a803baf77c/operator/0.log" Oct 03 14:55:32 crc kubenswrapper[4861]: I1003 14:55:32.221830 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-6859f9b676-9wwkj_1fa5571a-b9b5-4395-aa7a-a32a670f8e92/kube-rbac-proxy/0.log" Oct 03 14:55:32 crc kubenswrapper[4861]: I1003 14:55:32.298501 4861 
log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-6859f9b676-9wwkj_1fa5571a-b9b5-4395-aa7a-a32a670f8e92/manager/0.log" Oct 03 14:55:32 crc kubenswrapper[4861]: I1003 14:55:32.342614 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-5db5cf686f-5tfpv_682b0ab4-202c-4455-872c-715e9e6c4ee1/kube-rbac-proxy/0.log" Oct 03 14:55:32 crc kubenswrapper[4861]: I1003 14:55:32.446720 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-5db5cf686f-5tfpv_682b0ab4-202c-4455-872c-715e9e6c4ee1/manager/0.log" Oct 03 14:55:32 crc kubenswrapper[4861]: I1003 14:55:32.485187 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5cd5cb47d7-stsgz_896120d6-4995-4fd4-a238-4b34c6128326/kube-rbac-proxy/0.log" Oct 03 14:55:32 crc kubenswrapper[4861]: I1003 14:55:32.523587 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5cd5cb47d7-stsgz_896120d6-4995-4fd4-a238-4b34c6128326/manager/0.log" Oct 03 14:55:32 crc kubenswrapper[4861]: I1003 14:55:32.645391 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-fcd7d9895-mwpqn_0ab6f3ab-52bf-404a-8102-195683e803e8/kube-rbac-proxy/0.log" Oct 03 14:55:32 crc kubenswrapper[4861]: I1003 14:55:32.678560 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-fcd7d9895-mwpqn_0ab6f3ab-52bf-404a-8102-195683e803e8/manager/0.log" Oct 03 14:55:32 crc kubenswrapper[4861]: I1003 14:55:32.681485 4861 scope.go:117] "RemoveContainer" containerID="b2d449adc90f7d0841e7f09d27f801ef238eaa66c11a79a42bb54ccb6df2a132" Oct 03 14:55:32 crc kubenswrapper[4861]: E1003 14:55:32.681821 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 14:55:43 crc kubenswrapper[4861]: I1003 14:55:43.680999 4861 scope.go:117] "RemoveContainer" containerID="b2d449adc90f7d0841e7f09d27f801ef238eaa66c11a79a42bb54ccb6df2a132" Oct 03 14:55:43 crc kubenswrapper[4861]: E1003 14:55:43.682331 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 14:55:50 crc kubenswrapper[4861]: I1003 14:55:50.289177 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-fcq84_876d6207-8976-4d02-887b-b431a4821eab/control-plane-machine-set-operator/0.log" Oct 03 14:55:50 crc kubenswrapper[4861]: I1003 14:55:50.450924 4861 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-rg4gb_04154cd6-a67c-42d3-bbb0-951c4986390d/machine-api-operator/0.log" Oct 03 14:55:50 crc kubenswrapper[4861]: I1003 14:55:50.465720 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-rg4gb_04154cd6-a67c-42d3-bbb0-951c4986390d/kube-rbac-proxy/0.log" Oct 03 14:55:55 crc kubenswrapper[4861]: I1003 14:55:55.680972 4861 scope.go:117] "RemoveContainer" containerID="b2d449adc90f7d0841e7f09d27f801ef238eaa66c11a79a42bb54ccb6df2a132" Oct 03 14:55:55 crc kubenswrapper[4861]: E1003 14:55:55.681682 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 14:56:03 crc kubenswrapper[4861]: I1003 14:56:03.561441 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-mpwfn_f5edb507-a930-4aac-b964-575b85ab8676/cert-manager-controller/0.log" Oct 03 14:56:03 crc kubenswrapper[4861]: I1003 14:56:03.668038 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-fkrcs_10cb85f1-4e24-4793-b1d0-8c3b11ceb85c/cert-manager-cainjector/0.log" Oct 03 14:56:03 crc kubenswrapper[4861]: I1003 14:56:03.773202 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-872cz_16b81380-ffaa-4755-a4d0-86c03eb2094f/cert-manager-webhook/0.log" Oct 03 14:56:09 crc kubenswrapper[4861]: I1003 14:56:09.693191 4861 scope.go:117] "RemoveContainer" containerID="b2d449adc90f7d0841e7f09d27f801ef238eaa66c11a79a42bb54ccb6df2a132" Oct 03 14:56:09 crc kubenswrapper[4861]: E1003 14:56:09.694060 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 14:56:15 crc kubenswrapper[4861]: I1003 14:56:15.958368 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-6b874cbd85-jmr9f_767a82d2-0bea-436b-b63a-c5bbf0de86b8/nmstate-console-plugin/0.log" Oct 03 14:56:16 crc kubenswrapper[4861]: I1003 14:56:16.118808 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-fdff9cb8d-b4mxl_74a61ffa-6414-4072-952f-d3a9e5df2cad/kube-rbac-proxy/0.log" Oct 03 14:56:16 crc kubenswrapper[4861]: I1003 14:56:16.179526 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-thvxb_c1ec679d-6458-489b-bdb3-6c6ec465d695/nmstate-handler/0.log" Oct 03 14:56:16 crc kubenswrapper[4861]: I1003 14:56:16.207390 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-fdff9cb8d-b4mxl_74a61ffa-6414-4072-952f-d3a9e5df2cad/nmstate-metrics/0.log" Oct 03 14:56:16 crc kubenswrapper[4861]: I1003 14:56:16.371203 4861 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-nmstate_nmstate-operator-858ddd8f98-kxqsn_28617d69-b62d-41a8-bb48-d89be9a37676/nmstate-operator/0.log" Oct 03 14:56:16 crc kubenswrapper[4861]: I1003 14:56:16.434781 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-6cdbc54649-v6q9r_cecb92af-dba6-4e7b-825c-97d7fcd5cc5d/nmstate-webhook/0.log" Oct 03 14:56:23 crc kubenswrapper[4861]: I1003 14:56:23.681719 4861 scope.go:117] "RemoveContainer" containerID="b2d449adc90f7d0841e7f09d27f801ef238eaa66c11a79a42bb54ccb6df2a132" Oct 03 14:56:23 crc kubenswrapper[4861]: E1003 14:56:23.682433 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 14:56:31 crc kubenswrapper[4861]: I1003 14:56:31.409019 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-68d546b9d8-cm757_6aca0805-4feb-4b13-8b46-f41858176496/kube-rbac-proxy/0.log" Oct 03 14:56:31 crc kubenswrapper[4861]: I1003 14:56:31.540367 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-68d546b9d8-cm757_6aca0805-4feb-4b13-8b46-f41858176496/controller/0.log" Oct 03 14:56:31 crc kubenswrapper[4861]: I1003 14:56:31.653664 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-64bf5d555-jn69p_9480abea-a1f1-4416-880b-8fb72fd8716b/frr-k8s-webhook-server/0.log" Oct 03 14:56:31 crc kubenswrapper[4861]: I1003 14:56:31.703918 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xxhsl_f3324863-98d8-44d0-91dc-4bb8dc243a8f/cp-frr-files/0.log" Oct 03 14:56:31 crc kubenswrapper[4861]: I1003 14:56:31.892689 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xxhsl_f3324863-98d8-44d0-91dc-4bb8dc243a8f/cp-frr-files/0.log" Oct 03 14:56:31 crc kubenswrapper[4861]: I1003 14:56:31.894013 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xxhsl_f3324863-98d8-44d0-91dc-4bb8dc243a8f/cp-reloader/0.log" Oct 03 14:56:31 crc kubenswrapper[4861]: I1003 14:56:31.929936 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xxhsl_f3324863-98d8-44d0-91dc-4bb8dc243a8f/cp-reloader/0.log" Oct 03 14:56:31 crc kubenswrapper[4861]: I1003 14:56:31.961957 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xxhsl_f3324863-98d8-44d0-91dc-4bb8dc243a8f/cp-metrics/0.log" Oct 03 14:56:32 crc kubenswrapper[4861]: I1003 14:56:32.116457 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xxhsl_f3324863-98d8-44d0-91dc-4bb8dc243a8f/cp-frr-files/0.log" Oct 03 14:56:32 crc kubenswrapper[4861]: I1003 14:56:32.138643 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xxhsl_f3324863-98d8-44d0-91dc-4bb8dc243a8f/cp-metrics/0.log" Oct 03 14:56:32 crc kubenswrapper[4861]: I1003 14:56:32.158276 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xxhsl_f3324863-98d8-44d0-91dc-4bb8dc243a8f/cp-reloader/0.log" Oct 03 14:56:32 crc kubenswrapper[4861]: I1003 14:56:32.188163 4861 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_frr-k8s-xxhsl_f3324863-98d8-44d0-91dc-4bb8dc243a8f/cp-metrics/0.log" Oct 03 14:56:32 crc kubenswrapper[4861]: I1003 14:56:32.326640 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xxhsl_f3324863-98d8-44d0-91dc-4bb8dc243a8f/cp-metrics/0.log" Oct 03 14:56:32 crc kubenswrapper[4861]: I1003 14:56:32.342872 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xxhsl_f3324863-98d8-44d0-91dc-4bb8dc243a8f/cp-reloader/0.log" Oct 03 14:56:32 crc kubenswrapper[4861]: I1003 14:56:32.351425 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xxhsl_f3324863-98d8-44d0-91dc-4bb8dc243a8f/cp-frr-files/0.log" Oct 03 14:56:32 crc kubenswrapper[4861]: I1003 14:56:32.375334 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xxhsl_f3324863-98d8-44d0-91dc-4bb8dc243a8f/controller/0.log" Oct 03 14:56:32 crc kubenswrapper[4861]: I1003 14:56:32.984575 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xxhsl_f3324863-98d8-44d0-91dc-4bb8dc243a8f/frr-metrics/0.log" Oct 03 14:56:33 crc kubenswrapper[4861]: I1003 14:56:33.054638 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xxhsl_f3324863-98d8-44d0-91dc-4bb8dc243a8f/kube-rbac-proxy/0.log" Oct 03 14:56:33 crc kubenswrapper[4861]: I1003 14:56:33.120608 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xxhsl_f3324863-98d8-44d0-91dc-4bb8dc243a8f/kube-rbac-proxy-frr/0.log" Oct 03 14:56:33 crc kubenswrapper[4861]: I1003 14:56:33.343924 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xxhsl_f3324863-98d8-44d0-91dc-4bb8dc243a8f/reloader/0.log" Oct 03 14:56:33 crc kubenswrapper[4861]: I1003 14:56:33.449003 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-5dbf4fd78b-2d6hn_03b31621-6432-424e-a55f-aecaf846b082/manager/0.log" Oct 03 14:56:33 crc kubenswrapper[4861]: I1003 14:56:33.702466 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-5b7c788d54-q5fg6_f8fb711a-85c6-4014-8f23-7edabc8faf74/webhook-server/0.log" Oct 03 14:56:33 crc kubenswrapper[4861]: I1003 14:56:33.928096 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-vdx27_6d48b95e-8866-4456-95c0-4c3b728f6f93/kube-rbac-proxy/0.log" Oct 03 14:56:34 crc kubenswrapper[4861]: I1003 14:56:34.482638 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xxhsl_f3324863-98d8-44d0-91dc-4bb8dc243a8f/frr/0.log" Oct 03 14:56:34 crc kubenswrapper[4861]: I1003 14:56:34.497479 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-vdx27_6d48b95e-8866-4456-95c0-4c3b728f6f93/speaker/0.log" Oct 03 14:56:35 crc kubenswrapper[4861]: I1003 14:56:35.353899 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-vzsg9"] Oct 03 14:56:35 crc kubenswrapper[4861]: E1003 14:56:35.357819 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d1736936-d30e-4b4c-9aa3-723b7c43d73e" containerName="container-00" Oct 03 14:56:35 crc kubenswrapper[4861]: I1003 14:56:35.357907 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="d1736936-d30e-4b4c-9aa3-723b7c43d73e" containerName="container-00" Oct 03 14:56:35 crc kubenswrapper[4861]: I1003 
14:56:35.358169 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="d1736936-d30e-4b4c-9aa3-723b7c43d73e" containerName="container-00" Oct 03 14:56:35 crc kubenswrapper[4861]: I1003 14:56:35.359530 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-vzsg9" Oct 03 14:56:35 crc kubenswrapper[4861]: I1003 14:56:35.379574 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-vzsg9"] Oct 03 14:56:35 crc kubenswrapper[4861]: I1003 14:56:35.463827 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/830f02d4-6548-486c-86df-a009fa9a5221-utilities\") pod \"certified-operators-vzsg9\" (UID: \"830f02d4-6548-486c-86df-a009fa9a5221\") " pod="openshift-marketplace/certified-operators-vzsg9" Oct 03 14:56:35 crc kubenswrapper[4861]: I1003 14:56:35.463864 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/830f02d4-6548-486c-86df-a009fa9a5221-catalog-content\") pod \"certified-operators-vzsg9\" (UID: \"830f02d4-6548-486c-86df-a009fa9a5221\") " pod="openshift-marketplace/certified-operators-vzsg9" Oct 03 14:56:35 crc kubenswrapper[4861]: I1003 14:56:35.464077 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5g5tj\" (UniqueName: \"kubernetes.io/projected/830f02d4-6548-486c-86df-a009fa9a5221-kube-api-access-5g5tj\") pod \"certified-operators-vzsg9\" (UID: \"830f02d4-6548-486c-86df-a009fa9a5221\") " pod="openshift-marketplace/certified-operators-vzsg9" Oct 03 14:56:35 crc kubenswrapper[4861]: I1003 14:56:35.565465 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5g5tj\" (UniqueName: \"kubernetes.io/projected/830f02d4-6548-486c-86df-a009fa9a5221-kube-api-access-5g5tj\") pod \"certified-operators-vzsg9\" (UID: \"830f02d4-6548-486c-86df-a009fa9a5221\") " pod="openshift-marketplace/certified-operators-vzsg9" Oct 03 14:56:35 crc kubenswrapper[4861]: I1003 14:56:35.565631 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/830f02d4-6548-486c-86df-a009fa9a5221-utilities\") pod \"certified-operators-vzsg9\" (UID: \"830f02d4-6548-486c-86df-a009fa9a5221\") " pod="openshift-marketplace/certified-operators-vzsg9" Oct 03 14:56:35 crc kubenswrapper[4861]: I1003 14:56:35.565659 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/830f02d4-6548-486c-86df-a009fa9a5221-catalog-content\") pod \"certified-operators-vzsg9\" (UID: \"830f02d4-6548-486c-86df-a009fa9a5221\") " pod="openshift-marketplace/certified-operators-vzsg9" Oct 03 14:56:35 crc kubenswrapper[4861]: I1003 14:56:35.566252 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/830f02d4-6548-486c-86df-a009fa9a5221-catalog-content\") pod \"certified-operators-vzsg9\" (UID: \"830f02d4-6548-486c-86df-a009fa9a5221\") " pod="openshift-marketplace/certified-operators-vzsg9" Oct 03 14:56:35 crc kubenswrapper[4861]: I1003 14:56:35.566306 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/830f02d4-6548-486c-86df-a009fa9a5221-utilities\") pod \"certified-operators-vzsg9\" (UID: \"830f02d4-6548-486c-86df-a009fa9a5221\") " pod="openshift-marketplace/certified-operators-vzsg9" Oct 03 14:56:35 crc kubenswrapper[4861]: I1003 14:56:35.589922 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5g5tj\" (UniqueName: \"kubernetes.io/projected/830f02d4-6548-486c-86df-a009fa9a5221-kube-api-access-5g5tj\") pod \"certified-operators-vzsg9\" (UID: \"830f02d4-6548-486c-86df-a009fa9a5221\") " pod="openshift-marketplace/certified-operators-vzsg9" Oct 03 14:56:35 crc kubenswrapper[4861]: I1003 14:56:35.675790 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-vzsg9" Oct 03 14:56:36 crc kubenswrapper[4861]: I1003 14:56:36.628205 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-vzsg9"] Oct 03 14:56:36 crc kubenswrapper[4861]: I1003 14:56:36.704725 4861 scope.go:117] "RemoveContainer" containerID="b2d449adc90f7d0841e7f09d27f801ef238eaa66c11a79a42bb54ccb6df2a132" Oct 03 14:56:36 crc kubenswrapper[4861]: E1003 14:56:36.705512 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 14:56:37 crc kubenswrapper[4861]: I1003 14:56:37.172515 4861 generic.go:334] "Generic (PLEG): container finished" podID="830f02d4-6548-486c-86df-a009fa9a5221" containerID="051d0eb8249165c81496e060543df17e63ed5a549594136595d430eabde9e401" exitCode=0 Oct 03 14:56:37 crc kubenswrapper[4861]: I1003 14:56:37.172809 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vzsg9" event={"ID":"830f02d4-6548-486c-86df-a009fa9a5221","Type":"ContainerDied","Data":"051d0eb8249165c81496e060543df17e63ed5a549594136595d430eabde9e401"} Oct 03 14:56:37 crc kubenswrapper[4861]: I1003 14:56:37.172836 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vzsg9" event={"ID":"830f02d4-6548-486c-86df-a009fa9a5221","Type":"ContainerStarted","Data":"f36737db4e4019dc9e3b26e0f1a6e94e7a50e549241befb0e1082892ad1f346d"} Oct 03 14:56:38 crc kubenswrapper[4861]: I1003 14:56:38.186963 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vzsg9" event={"ID":"830f02d4-6548-486c-86df-a009fa9a5221","Type":"ContainerStarted","Data":"442c30f94408da1003e9c52afc276a7b19c1bdeac3fdb332dc2f51b4b97e0c82"} Oct 03 14:56:39 crc kubenswrapper[4861]: I1003 14:56:39.197204 4861 generic.go:334] "Generic (PLEG): container finished" podID="830f02d4-6548-486c-86df-a009fa9a5221" containerID="442c30f94408da1003e9c52afc276a7b19c1bdeac3fdb332dc2f51b4b97e0c82" exitCode=0 Oct 03 14:56:39 crc kubenswrapper[4861]: I1003 14:56:39.197278 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vzsg9" event={"ID":"830f02d4-6548-486c-86df-a009fa9a5221","Type":"ContainerDied","Data":"442c30f94408da1003e9c52afc276a7b19c1bdeac3fdb332dc2f51b4b97e0c82"} Oct 03 14:56:40 crc kubenswrapper[4861]: I1003 14:56:40.207185 4861 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openshift-marketplace/certified-operators-vzsg9" event={"ID":"830f02d4-6548-486c-86df-a009fa9a5221","Type":"ContainerStarted","Data":"a956f90cf1a154570d327860243f2c4ed757e68f34a685bc0c0ae1fe17c473bd"} Oct 03 14:56:40 crc kubenswrapper[4861]: I1003 14:56:40.232627 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-vzsg9" podStartSLOduration=2.818889229 podStartE2EDuration="5.232606603s" podCreationTimestamp="2025-10-03 14:56:35 +0000 UTC" firstStartedPulling="2025-10-03 14:56:37.174755432 +0000 UTC m=+5111.172740479" lastFinishedPulling="2025-10-03 14:56:39.588472806 +0000 UTC m=+5113.586457853" observedRunningTime="2025-10-03 14:56:40.226828349 +0000 UTC m=+5114.224813396" watchObservedRunningTime="2025-10-03 14:56:40.232606603 +0000 UTC m=+5114.230591660" Oct 03 14:56:45 crc kubenswrapper[4861]: I1003 14:56:45.676286 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-vzsg9" Oct 03 14:56:45 crc kubenswrapper[4861]: I1003 14:56:45.676930 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-vzsg9" Oct 03 14:56:45 crc kubenswrapper[4861]: I1003 14:56:45.730450 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-vzsg9" Oct 03 14:56:46 crc kubenswrapper[4861]: I1003 14:56:46.323887 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-vzsg9" Oct 03 14:56:46 crc kubenswrapper[4861]: I1003 14:56:46.371177 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-vzsg9"] Oct 03 14:56:48 crc kubenswrapper[4861]: I1003 14:56:48.282540 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-vzsg9" podUID="830f02d4-6548-486c-86df-a009fa9a5221" containerName="registry-server" containerID="cri-o://a956f90cf1a154570d327860243f2c4ed757e68f34a685bc0c0ae1fe17c473bd" gracePeriod=2 Oct 03 14:56:48 crc kubenswrapper[4861]: I1003 14:56:48.685885 4861 scope.go:117] "RemoveContainer" containerID="b2d449adc90f7d0841e7f09d27f801ef238eaa66c11a79a42bb54ccb6df2a132" Oct 03 14:56:48 crc kubenswrapper[4861]: E1003 14:56:48.686338 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 14:56:48 crc kubenswrapper[4861]: I1003 14:56:48.835591 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-vzsg9" Oct 03 14:56:48 crc kubenswrapper[4861]: I1003 14:56:48.917720 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5g5tj\" (UniqueName: \"kubernetes.io/projected/830f02d4-6548-486c-86df-a009fa9a5221-kube-api-access-5g5tj\") pod \"830f02d4-6548-486c-86df-a009fa9a5221\" (UID: \"830f02d4-6548-486c-86df-a009fa9a5221\") " Oct 03 14:56:48 crc kubenswrapper[4861]: I1003 14:56:48.917916 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/830f02d4-6548-486c-86df-a009fa9a5221-utilities\") pod \"830f02d4-6548-486c-86df-a009fa9a5221\" (UID: \"830f02d4-6548-486c-86df-a009fa9a5221\") " Oct 03 14:56:48 crc kubenswrapper[4861]: I1003 14:56:48.917963 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/830f02d4-6548-486c-86df-a009fa9a5221-catalog-content\") pod \"830f02d4-6548-486c-86df-a009fa9a5221\" (UID: \"830f02d4-6548-486c-86df-a009fa9a5221\") " Oct 03 14:56:48 crc kubenswrapper[4861]: I1003 14:56:48.919139 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/830f02d4-6548-486c-86df-a009fa9a5221-utilities" (OuterVolumeSpecName: "utilities") pod "830f02d4-6548-486c-86df-a009fa9a5221" (UID: "830f02d4-6548-486c-86df-a009fa9a5221"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 14:56:48 crc kubenswrapper[4861]: I1003 14:56:48.924512 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/830f02d4-6548-486c-86df-a009fa9a5221-kube-api-access-5g5tj" (OuterVolumeSpecName: "kube-api-access-5g5tj") pod "830f02d4-6548-486c-86df-a009fa9a5221" (UID: "830f02d4-6548-486c-86df-a009fa9a5221"). InnerVolumeSpecName "kube-api-access-5g5tj". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 14:56:48 crc kubenswrapper[4861]: I1003 14:56:48.988211 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/830f02d4-6548-486c-86df-a009fa9a5221-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "830f02d4-6548-486c-86df-a009fa9a5221" (UID: "830f02d4-6548-486c-86df-a009fa9a5221"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 14:56:49 crc kubenswrapper[4861]: I1003 14:56:49.019647 4861 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/830f02d4-6548-486c-86df-a009fa9a5221-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 14:56:49 crc kubenswrapper[4861]: I1003 14:56:49.019680 4861 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/830f02d4-6548-486c-86df-a009fa9a5221-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 14:56:49 crc kubenswrapper[4861]: I1003 14:56:49.019690 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5g5tj\" (UniqueName: \"kubernetes.io/projected/830f02d4-6548-486c-86df-a009fa9a5221-kube-api-access-5g5tj\") on node \"crc\" DevicePath \"\"" Oct 03 14:56:49 crc kubenswrapper[4861]: I1003 14:56:49.172856 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2xk9jt_b2677784-5297-4d5b-8558-e904b9668fa5/util/0.log" Oct 03 14:56:49 crc kubenswrapper[4861]: I1003 14:56:49.294594 4861 generic.go:334] "Generic (PLEG): container finished" podID="830f02d4-6548-486c-86df-a009fa9a5221" containerID="a956f90cf1a154570d327860243f2c4ed757e68f34a685bc0c0ae1fe17c473bd" exitCode=0 Oct 03 14:56:49 crc kubenswrapper[4861]: I1003 14:56:49.294647 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vzsg9" event={"ID":"830f02d4-6548-486c-86df-a009fa9a5221","Type":"ContainerDied","Data":"a956f90cf1a154570d327860243f2c4ed757e68f34a685bc0c0ae1fe17c473bd"} Oct 03 14:56:49 crc kubenswrapper[4861]: I1003 14:56:49.294677 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vzsg9" event={"ID":"830f02d4-6548-486c-86df-a009fa9a5221","Type":"ContainerDied","Data":"f36737db4e4019dc9e3b26e0f1a6e94e7a50e549241befb0e1082892ad1f346d"} Oct 03 14:56:49 crc kubenswrapper[4861]: I1003 14:56:49.294676 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-vzsg9" Oct 03 14:56:49 crc kubenswrapper[4861]: I1003 14:56:49.294750 4861 scope.go:117] "RemoveContainer" containerID="a956f90cf1a154570d327860243f2c4ed757e68f34a685bc0c0ae1fe17c473bd" Oct 03 14:56:49 crc kubenswrapper[4861]: I1003 14:56:49.315619 4861 scope.go:117] "RemoveContainer" containerID="442c30f94408da1003e9c52afc276a7b19c1bdeac3fdb332dc2f51b4b97e0c82" Oct 03 14:56:49 crc kubenswrapper[4861]: I1003 14:56:49.333463 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-vzsg9"] Oct 03 14:56:49 crc kubenswrapper[4861]: I1003 14:56:49.339900 4861 scope.go:117] "RemoveContainer" containerID="051d0eb8249165c81496e060543df17e63ed5a549594136595d430eabde9e401" Oct 03 14:56:49 crc kubenswrapper[4861]: I1003 14:56:49.358523 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-vzsg9"] Oct 03 14:56:49 crc kubenswrapper[4861]: I1003 14:56:49.406175 4861 scope.go:117] "RemoveContainer" containerID="a956f90cf1a154570d327860243f2c4ed757e68f34a685bc0c0ae1fe17c473bd" Oct 03 14:56:49 crc kubenswrapper[4861]: E1003 14:56:49.416351 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a956f90cf1a154570d327860243f2c4ed757e68f34a685bc0c0ae1fe17c473bd\": container with ID starting with a956f90cf1a154570d327860243f2c4ed757e68f34a685bc0c0ae1fe17c473bd not found: ID does not exist" containerID="a956f90cf1a154570d327860243f2c4ed757e68f34a685bc0c0ae1fe17c473bd" Oct 03 14:56:49 crc kubenswrapper[4861]: I1003 14:56:49.416391 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a956f90cf1a154570d327860243f2c4ed757e68f34a685bc0c0ae1fe17c473bd"} err="failed to get container status \"a956f90cf1a154570d327860243f2c4ed757e68f34a685bc0c0ae1fe17c473bd\": rpc error: code = NotFound desc = could not find container \"a956f90cf1a154570d327860243f2c4ed757e68f34a685bc0c0ae1fe17c473bd\": container with ID starting with a956f90cf1a154570d327860243f2c4ed757e68f34a685bc0c0ae1fe17c473bd not found: ID does not exist" Oct 03 14:56:49 crc kubenswrapper[4861]: I1003 14:56:49.416425 4861 scope.go:117] "RemoveContainer" containerID="442c30f94408da1003e9c52afc276a7b19c1bdeac3fdb332dc2f51b4b97e0c82" Oct 03 14:56:49 crc kubenswrapper[4861]: E1003 14:56:49.417533 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"442c30f94408da1003e9c52afc276a7b19c1bdeac3fdb332dc2f51b4b97e0c82\": container with ID starting with 442c30f94408da1003e9c52afc276a7b19c1bdeac3fdb332dc2f51b4b97e0c82 not found: ID does not exist" containerID="442c30f94408da1003e9c52afc276a7b19c1bdeac3fdb332dc2f51b4b97e0c82" Oct 03 14:56:49 crc kubenswrapper[4861]: I1003 14:56:49.417558 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"442c30f94408da1003e9c52afc276a7b19c1bdeac3fdb332dc2f51b4b97e0c82"} err="failed to get container status \"442c30f94408da1003e9c52afc276a7b19c1bdeac3fdb332dc2f51b4b97e0c82\": rpc error: code = NotFound desc = could not find container \"442c30f94408da1003e9c52afc276a7b19c1bdeac3fdb332dc2f51b4b97e0c82\": container with ID starting with 442c30f94408da1003e9c52afc276a7b19c1bdeac3fdb332dc2f51b4b97e0c82 not found: ID does not exist" Oct 03 14:56:49 crc kubenswrapper[4861]: I1003 14:56:49.417578 4861 scope.go:117] "RemoveContainer" 
containerID="051d0eb8249165c81496e060543df17e63ed5a549594136595d430eabde9e401" Oct 03 14:56:49 crc kubenswrapper[4861]: E1003 14:56:49.418798 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"051d0eb8249165c81496e060543df17e63ed5a549594136595d430eabde9e401\": container with ID starting with 051d0eb8249165c81496e060543df17e63ed5a549594136595d430eabde9e401 not found: ID does not exist" containerID="051d0eb8249165c81496e060543df17e63ed5a549594136595d430eabde9e401" Oct 03 14:56:49 crc kubenswrapper[4861]: I1003 14:56:49.418837 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"051d0eb8249165c81496e060543df17e63ed5a549594136595d430eabde9e401"} err="failed to get container status \"051d0eb8249165c81496e060543df17e63ed5a549594136595d430eabde9e401\": rpc error: code = NotFound desc = could not find container \"051d0eb8249165c81496e060543df17e63ed5a549594136595d430eabde9e401\": container with ID starting with 051d0eb8249165c81496e060543df17e63ed5a549594136595d430eabde9e401 not found: ID does not exist" Oct 03 14:56:49 crc kubenswrapper[4861]: I1003 14:56:49.516487 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2xk9jt_b2677784-5297-4d5b-8558-e904b9668fa5/pull/0.log" Oct 03 14:56:49 crc kubenswrapper[4861]: I1003 14:56:49.517618 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2xk9jt_b2677784-5297-4d5b-8558-e904b9668fa5/util/0.log" Oct 03 14:56:49 crc kubenswrapper[4861]: I1003 14:56:49.522871 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2xk9jt_b2677784-5297-4d5b-8558-e904b9668fa5/pull/0.log" Oct 03 14:56:49 crc kubenswrapper[4861]: I1003 14:56:49.729066 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2xk9jt_b2677784-5297-4d5b-8558-e904b9668fa5/pull/0.log" Oct 03 14:56:49 crc kubenswrapper[4861]: I1003 14:56:49.747452 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2xk9jt_b2677784-5297-4d5b-8558-e904b9668fa5/util/0.log" Oct 03 14:56:49 crc kubenswrapper[4861]: I1003 14:56:49.753721 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2xk9jt_b2677784-5297-4d5b-8558-e904b9668fa5/extract/0.log" Oct 03 14:56:49 crc kubenswrapper[4861]: I1003 14:56:49.925804 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-c5wgm_fe2ed840-3dd5-4276-b039-eec14967f4ee/extract-utilities/0.log" Oct 03 14:56:50 crc kubenswrapper[4861]: I1003 14:56:50.154053 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-c5wgm_fe2ed840-3dd5-4276-b039-eec14967f4ee/extract-content/0.log" Oct 03 14:56:50 crc kubenswrapper[4861]: I1003 14:56:50.167516 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-c5wgm_fe2ed840-3dd5-4276-b039-eec14967f4ee/extract-utilities/0.log" Oct 03 14:56:50 crc kubenswrapper[4861]: I1003 14:56:50.267323 4861 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_certified-operators-c5wgm_fe2ed840-3dd5-4276-b039-eec14967f4ee/extract-content/0.log" Oct 03 14:56:50 crc kubenswrapper[4861]: I1003 14:56:50.382278 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-c5wgm_fe2ed840-3dd5-4276-b039-eec14967f4ee/extract-utilities/0.log" Oct 03 14:56:50 crc kubenswrapper[4861]: I1003 14:56:50.445107 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-c5wgm_fe2ed840-3dd5-4276-b039-eec14967f4ee/extract-content/0.log" Oct 03 14:56:50 crc kubenswrapper[4861]: I1003 14:56:50.708326 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="830f02d4-6548-486c-86df-a009fa9a5221" path="/var/lib/kubelet/pods/830f02d4-6548-486c-86df-a009fa9a5221/volumes" Oct 03 14:56:51 crc kubenswrapper[4861]: I1003 14:56:51.033485 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-c5wgm_fe2ed840-3dd5-4276-b039-eec14967f4ee/registry-server/0.log" Oct 03 14:56:51 crc kubenswrapper[4861]: I1003 14:56:51.212185 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-6q6wr_e67e431c-da77-437f-a3e7-1e6abb3ccc8b/extract-utilities/0.log" Oct 03 14:56:51 crc kubenswrapper[4861]: I1003 14:56:51.511792 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-6q6wr_e67e431c-da77-437f-a3e7-1e6abb3ccc8b/extract-content/0.log" Oct 03 14:56:51 crc kubenswrapper[4861]: I1003 14:56:51.511795 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-6q6wr_e67e431c-da77-437f-a3e7-1e6abb3ccc8b/extract-content/0.log" Oct 03 14:56:51 crc kubenswrapper[4861]: I1003 14:56:51.514032 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-6q6wr_e67e431c-da77-437f-a3e7-1e6abb3ccc8b/extract-utilities/0.log" Oct 03 14:56:51 crc kubenswrapper[4861]: I1003 14:56:51.930046 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-6q6wr_e67e431c-da77-437f-a3e7-1e6abb3ccc8b/extract-utilities/0.log" Oct 03 14:56:52 crc kubenswrapper[4861]: I1003 14:56:52.015327 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-6q6wr_e67e431c-da77-437f-a3e7-1e6abb3ccc8b/extract-content/0.log" Oct 03 14:56:52 crc kubenswrapper[4861]: I1003 14:56:52.263355 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ccx2jv_297c9b28-92e3-4c8e-bb08-63af1b637718/util/0.log" Oct 03 14:56:52 crc kubenswrapper[4861]: I1003 14:56:52.459623 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ccx2jv_297c9b28-92e3-4c8e-bb08-63af1b637718/util/0.log" Oct 03 14:56:52 crc kubenswrapper[4861]: I1003 14:56:52.461313 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ccx2jv_297c9b28-92e3-4c8e-bb08-63af1b637718/pull/0.log" Oct 03 14:56:52 crc kubenswrapper[4861]: I1003 14:56:52.570651 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ccx2jv_297c9b28-92e3-4c8e-bb08-63af1b637718/pull/0.log" Oct 
03 14:56:52 crc kubenswrapper[4861]: I1003 14:56:52.673482 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-6q6wr_e67e431c-da77-437f-a3e7-1e6abb3ccc8b/registry-server/0.log" Oct 03 14:56:52 crc kubenswrapper[4861]: I1003 14:56:52.768112 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ccx2jv_297c9b28-92e3-4c8e-bb08-63af1b637718/pull/0.log" Oct 03 14:56:52 crc kubenswrapper[4861]: I1003 14:56:52.784177 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ccx2jv_297c9b28-92e3-4c8e-bb08-63af1b637718/util/0.log" Oct 03 14:56:52 crc kubenswrapper[4861]: I1003 14:56:52.830422 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ccx2jv_297c9b28-92e3-4c8e-bb08-63af1b637718/extract/0.log" Oct 03 14:56:52 crc kubenswrapper[4861]: I1003 14:56:52.985791 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-p5qrs_6d1c8721-e495-45da-8947-09c44940673d/marketplace-operator/0.log" Oct 03 14:56:53 crc kubenswrapper[4861]: I1003 14:56:53.048606 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-9nrfv_cded624e-5d0b-45da-9e1f-de6def114c00/extract-utilities/0.log" Oct 03 14:56:54 crc kubenswrapper[4861]: I1003 14:56:54.157031 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-9nrfv_cded624e-5d0b-45da-9e1f-de6def114c00/extract-content/0.log" Oct 03 14:56:54 crc kubenswrapper[4861]: I1003 14:56:54.191661 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-9nrfv_cded624e-5d0b-45da-9e1f-de6def114c00/extract-utilities/0.log" Oct 03 14:56:54 crc kubenswrapper[4861]: I1003 14:56:54.206863 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-9nrfv_cded624e-5d0b-45da-9e1f-de6def114c00/extract-content/0.log" Oct 03 14:56:54 crc kubenswrapper[4861]: I1003 14:56:54.416921 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-9nrfv_cded624e-5d0b-45da-9e1f-de6def114c00/extract-content/0.log" Oct 03 14:56:54 crc kubenswrapper[4861]: I1003 14:56:54.537621 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-9nrfv_cded624e-5d0b-45da-9e1f-de6def114c00/registry-server/0.log" Oct 03 14:56:54 crc kubenswrapper[4861]: I1003 14:56:54.540101 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-vl5pp_d2c62b5f-5f8c-4245-90a3-fb06846e063b/extract-utilities/0.log" Oct 03 14:56:54 crc kubenswrapper[4861]: I1003 14:56:54.550706 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-9nrfv_cded624e-5d0b-45da-9e1f-de6def114c00/extract-utilities/0.log" Oct 03 14:56:54 crc kubenswrapper[4861]: I1003 14:56:54.655296 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-vl5pp_d2c62b5f-5f8c-4245-90a3-fb06846e063b/extract-utilities/0.log" Oct 03 14:56:54 crc kubenswrapper[4861]: I1003 14:56:54.719295 4861 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_redhat-operators-vl5pp_d2c62b5f-5f8c-4245-90a3-fb06846e063b/extract-content/0.log" Oct 03 14:56:54 crc kubenswrapper[4861]: I1003 14:56:54.719521 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-vl5pp_d2c62b5f-5f8c-4245-90a3-fb06846e063b/extract-content/0.log" Oct 03 14:56:54 crc kubenswrapper[4861]: I1003 14:56:54.915478 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-vl5pp_d2c62b5f-5f8c-4245-90a3-fb06846e063b/extract-utilities/0.log" Oct 03 14:56:55 crc kubenswrapper[4861]: I1003 14:56:55.015052 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-vl5pp_d2c62b5f-5f8c-4245-90a3-fb06846e063b/extract-content/0.log" Oct 03 14:56:55 crc kubenswrapper[4861]: I1003 14:56:55.618001 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-vl5pp_d2c62b5f-5f8c-4245-90a3-fb06846e063b/registry-server/0.log" Oct 03 14:57:01 crc kubenswrapper[4861]: I1003 14:57:01.680846 4861 scope.go:117] "RemoveContainer" containerID="b2d449adc90f7d0841e7f09d27f801ef238eaa66c11a79a42bb54ccb6df2a132" Oct 03 14:57:01 crc kubenswrapper[4861]: E1003 14:57:01.682533 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 14:57:14 crc kubenswrapper[4861]: I1003 14:57:14.683205 4861 scope.go:117] "RemoveContainer" containerID="b2d449adc90f7d0841e7f09d27f801ef238eaa66c11a79a42bb54ccb6df2a132" Oct 03 14:57:14 crc kubenswrapper[4861]: E1003 14:57:14.683960 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 14:57:27 crc kubenswrapper[4861]: I1003 14:57:27.681730 4861 scope.go:117] "RemoveContainer" containerID="b2d449adc90f7d0841e7f09d27f801ef238eaa66c11a79a42bb54ccb6df2a132" Oct 03 14:57:27 crc kubenswrapper[4861]: E1003 14:57:27.683074 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 14:57:39 crc kubenswrapper[4861]: I1003 14:57:39.682065 4861 scope.go:117] "RemoveContainer" containerID="b2d449adc90f7d0841e7f09d27f801ef238eaa66c11a79a42bb54ccb6df2a132" Oct 03 14:57:39 crc kubenswrapper[4861]: E1003 14:57:39.682981 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 14:57:52 crc kubenswrapper[4861]: I1003 14:57:52.682079 4861 scope.go:117] "RemoveContainer" containerID="b2d449adc90f7d0841e7f09d27f801ef238eaa66c11a79a42bb54ccb6df2a132" Oct 03 14:57:52 crc kubenswrapper[4861]: E1003 14:57:52.683415 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 14:58:07 crc kubenswrapper[4861]: I1003 14:58:07.681183 4861 scope.go:117] "RemoveContainer" containerID="b2d449adc90f7d0841e7f09d27f801ef238eaa66c11a79a42bb54ccb6df2a132" Oct 03 14:58:07 crc kubenswrapper[4861]: E1003 14:58:07.682099 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 14:58:22 crc kubenswrapper[4861]: I1003 14:58:22.682960 4861 scope.go:117] "RemoveContainer" containerID="b2d449adc90f7d0841e7f09d27f801ef238eaa66c11a79a42bb54ccb6df2a132" Oct 03 14:58:22 crc kubenswrapper[4861]: E1003 14:58:22.684474 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 14:58:34 crc kubenswrapper[4861]: I1003 14:58:34.681285 4861 scope.go:117] "RemoveContainer" containerID="b2d449adc90f7d0841e7f09d27f801ef238eaa66c11a79a42bb54ccb6df2a132" Oct 03 14:58:34 crc kubenswrapper[4861]: E1003 14:58:34.682451 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 14:58:46 crc kubenswrapper[4861]: I1003 14:58:46.693933 4861 scope.go:117] "RemoveContainer" containerID="b2d449adc90f7d0841e7f09d27f801ef238eaa66c11a79a42bb54ccb6df2a132" Oct 03 14:58:46 crc kubenswrapper[4861]: E1003 14:58:46.694948 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" 
podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 14:58:57 crc kubenswrapper[4861]: I1003 14:58:57.681457 4861 scope.go:117] "RemoveContainer" containerID="b2d449adc90f7d0841e7f09d27f801ef238eaa66c11a79a42bb54ccb6df2a132" Oct 03 14:58:57 crc kubenswrapper[4861]: E1003 14:58:57.682173 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 14:59:00 crc kubenswrapper[4861]: I1003 14:59:00.376678 4861 scope.go:117] "RemoveContainer" containerID="1aaf47a529585f987156745c94e49df181436cc02df40537792c398f16030b3f" Oct 03 14:59:10 crc kubenswrapper[4861]: I1003 14:59:10.681617 4861 scope.go:117] "RemoveContainer" containerID="b2d449adc90f7d0841e7f09d27f801ef238eaa66c11a79a42bb54ccb6df2a132" Oct 03 14:59:11 crc kubenswrapper[4861]: I1003 14:59:11.734731 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" event={"ID":"d8335d3f-417e-4114-b306-a3d8f6c31348","Type":"ContainerStarted","Data":"77c3015bab5401c3cc6bb663b4e9aeb836cb468891caf7c22966f819ab8330cb"} Oct 03 14:59:24 crc kubenswrapper[4861]: I1003 14:59:24.875905 4861 generic.go:334] "Generic (PLEG): container finished" podID="34266e5a-6aea-4c6c-8b0d-31c1eedfa1ac" containerID="e1d3f468dd297b00738b232908ce413f495ea3f23aaa84badb27c7810578650b" exitCode=0 Oct 03 14:59:24 crc kubenswrapper[4861]: I1003 14:59:24.875999 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-5glhl/must-gather-wlfgj" event={"ID":"34266e5a-6aea-4c6c-8b0d-31c1eedfa1ac","Type":"ContainerDied","Data":"e1d3f468dd297b00738b232908ce413f495ea3f23aaa84badb27c7810578650b"} Oct 03 14:59:24 crc kubenswrapper[4861]: I1003 14:59:24.877335 4861 scope.go:117] "RemoveContainer" containerID="e1d3f468dd297b00738b232908ce413f495ea3f23aaa84badb27c7810578650b" Oct 03 14:59:25 crc kubenswrapper[4861]: I1003 14:59:25.061618 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-5glhl_must-gather-wlfgj_34266e5a-6aea-4c6c-8b0d-31c1eedfa1ac/gather/0.log" Oct 03 14:59:34 crc kubenswrapper[4861]: I1003 14:59:34.834037 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-5glhl/must-gather-wlfgj"] Oct 03 14:59:34 crc kubenswrapper[4861]: I1003 14:59:34.834683 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-5glhl/must-gather-wlfgj" podUID="34266e5a-6aea-4c6c-8b0d-31c1eedfa1ac" containerName="copy" containerID="cri-o://0ac1273bed044b44515dce1247ca39b3c87939931099f4b912aa066ea044192b" gracePeriod=2 Oct 03 14:59:34 crc kubenswrapper[4861]: I1003 14:59:34.847288 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-5glhl/must-gather-wlfgj"] Oct 03 14:59:34 crc kubenswrapper[4861]: I1003 14:59:34.992855 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-5glhl_must-gather-wlfgj_34266e5a-6aea-4c6c-8b0d-31c1eedfa1ac/copy/0.log" Oct 03 14:59:34 crc kubenswrapper[4861]: I1003 14:59:34.993623 4861 generic.go:334] "Generic (PLEG): container finished" podID="34266e5a-6aea-4c6c-8b0d-31c1eedfa1ac" 
containerID="0ac1273bed044b44515dce1247ca39b3c87939931099f4b912aa066ea044192b" exitCode=143 Oct 03 14:59:35 crc kubenswrapper[4861]: I1003 14:59:35.370915 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-5glhl_must-gather-wlfgj_34266e5a-6aea-4c6c-8b0d-31c1eedfa1ac/copy/0.log" Oct 03 14:59:35 crc kubenswrapper[4861]: I1003 14:59:35.371567 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-5glhl/must-gather-wlfgj" Oct 03 14:59:35 crc kubenswrapper[4861]: I1003 14:59:35.542928 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jd48n\" (UniqueName: \"kubernetes.io/projected/34266e5a-6aea-4c6c-8b0d-31c1eedfa1ac-kube-api-access-jd48n\") pod \"34266e5a-6aea-4c6c-8b0d-31c1eedfa1ac\" (UID: \"34266e5a-6aea-4c6c-8b0d-31c1eedfa1ac\") " Oct 03 14:59:35 crc kubenswrapper[4861]: I1003 14:59:35.543143 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/34266e5a-6aea-4c6c-8b0d-31c1eedfa1ac-must-gather-output\") pod \"34266e5a-6aea-4c6c-8b0d-31c1eedfa1ac\" (UID: \"34266e5a-6aea-4c6c-8b0d-31c1eedfa1ac\") " Oct 03 14:59:35 crc kubenswrapper[4861]: I1003 14:59:35.561361 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/34266e5a-6aea-4c6c-8b0d-31c1eedfa1ac-kube-api-access-jd48n" (OuterVolumeSpecName: "kube-api-access-jd48n") pod "34266e5a-6aea-4c6c-8b0d-31c1eedfa1ac" (UID: "34266e5a-6aea-4c6c-8b0d-31c1eedfa1ac"). InnerVolumeSpecName "kube-api-access-jd48n". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 14:59:35 crc kubenswrapper[4861]: I1003 14:59:35.645290 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jd48n\" (UniqueName: \"kubernetes.io/projected/34266e5a-6aea-4c6c-8b0d-31c1eedfa1ac-kube-api-access-jd48n\") on node \"crc\" DevicePath \"\"" Oct 03 14:59:35 crc kubenswrapper[4861]: I1003 14:59:35.734579 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/34266e5a-6aea-4c6c-8b0d-31c1eedfa1ac-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "34266e5a-6aea-4c6c-8b0d-31c1eedfa1ac" (UID: "34266e5a-6aea-4c6c-8b0d-31c1eedfa1ac"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 14:59:35 crc kubenswrapper[4861]: I1003 14:59:35.747186 4861 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/34266e5a-6aea-4c6c-8b0d-31c1eedfa1ac-must-gather-output\") on node \"crc\" DevicePath \"\"" Oct 03 14:59:36 crc kubenswrapper[4861]: I1003 14:59:36.010477 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-5glhl_must-gather-wlfgj_34266e5a-6aea-4c6c-8b0d-31c1eedfa1ac/copy/0.log" Oct 03 14:59:36 crc kubenswrapper[4861]: I1003 14:59:36.011092 4861 scope.go:117] "RemoveContainer" containerID="0ac1273bed044b44515dce1247ca39b3c87939931099f4b912aa066ea044192b" Oct 03 14:59:36 crc kubenswrapper[4861]: I1003 14:59:36.011355 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-5glhl/must-gather-wlfgj" Oct 03 14:59:36 crc kubenswrapper[4861]: I1003 14:59:36.040607 4861 scope.go:117] "RemoveContainer" containerID="e1d3f468dd297b00738b232908ce413f495ea3f23aaa84badb27c7810578650b" Oct 03 14:59:36 crc kubenswrapper[4861]: I1003 14:59:36.696012 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="34266e5a-6aea-4c6c-8b0d-31c1eedfa1ac" path="/var/lib/kubelet/pods/34266e5a-6aea-4c6c-8b0d-31c1eedfa1ac/volumes" Oct 03 15:00:00 crc kubenswrapper[4861]: I1003 15:00:00.162187 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29325060-jhmch"] Oct 03 15:00:00 crc kubenswrapper[4861]: E1003 15:00:00.163033 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="830f02d4-6548-486c-86df-a009fa9a5221" containerName="extract-utilities" Oct 03 15:00:00 crc kubenswrapper[4861]: I1003 15:00:00.163045 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="830f02d4-6548-486c-86df-a009fa9a5221" containerName="extract-utilities" Oct 03 15:00:00 crc kubenswrapper[4861]: E1003 15:00:00.163072 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="830f02d4-6548-486c-86df-a009fa9a5221" containerName="registry-server" Oct 03 15:00:00 crc kubenswrapper[4861]: I1003 15:00:00.163078 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="830f02d4-6548-486c-86df-a009fa9a5221" containerName="registry-server" Oct 03 15:00:00 crc kubenswrapper[4861]: E1003 15:00:00.163087 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="830f02d4-6548-486c-86df-a009fa9a5221" containerName="extract-content" Oct 03 15:00:00 crc kubenswrapper[4861]: I1003 15:00:00.163094 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="830f02d4-6548-486c-86df-a009fa9a5221" containerName="extract-content" Oct 03 15:00:00 crc kubenswrapper[4861]: E1003 15:00:00.163105 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="34266e5a-6aea-4c6c-8b0d-31c1eedfa1ac" containerName="gather" Oct 03 15:00:00 crc kubenswrapper[4861]: I1003 15:00:00.163111 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="34266e5a-6aea-4c6c-8b0d-31c1eedfa1ac" containerName="gather" Oct 03 15:00:00 crc kubenswrapper[4861]: E1003 15:00:00.163125 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="34266e5a-6aea-4c6c-8b0d-31c1eedfa1ac" containerName="copy" Oct 03 15:00:00 crc kubenswrapper[4861]: I1003 15:00:00.163130 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="34266e5a-6aea-4c6c-8b0d-31c1eedfa1ac" containerName="copy" Oct 03 15:00:00 crc kubenswrapper[4861]: I1003 15:00:00.163306 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="34266e5a-6aea-4c6c-8b0d-31c1eedfa1ac" containerName="gather" Oct 03 15:00:00 crc kubenswrapper[4861]: I1003 15:00:00.163328 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="34266e5a-6aea-4c6c-8b0d-31c1eedfa1ac" containerName="copy" Oct 03 15:00:00 crc kubenswrapper[4861]: I1003 15:00:00.163338 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="830f02d4-6548-486c-86df-a009fa9a5221" containerName="registry-server" Oct 03 15:00:00 crc kubenswrapper[4861]: I1003 15:00:00.163924 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29325060-jhmch" Oct 03 15:00:00 crc kubenswrapper[4861]: I1003 15:00:00.171337 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 03 15:00:00 crc kubenswrapper[4861]: I1003 15:00:00.177140 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 03 15:00:00 crc kubenswrapper[4861]: I1003 15:00:00.182581 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29325060-jhmch"] Oct 03 15:00:00 crc kubenswrapper[4861]: I1003 15:00:00.219483 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1fa5fd87-24ff-4cec-9b8a-0a4882a9881d-config-volume\") pod \"collect-profiles-29325060-jhmch\" (UID: \"1fa5fd87-24ff-4cec-9b8a-0a4882a9881d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325060-jhmch" Oct 03 15:00:00 crc kubenswrapper[4861]: I1003 15:00:00.219585 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wcgpd\" (UniqueName: \"kubernetes.io/projected/1fa5fd87-24ff-4cec-9b8a-0a4882a9881d-kube-api-access-wcgpd\") pod \"collect-profiles-29325060-jhmch\" (UID: \"1fa5fd87-24ff-4cec-9b8a-0a4882a9881d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325060-jhmch" Oct 03 15:00:00 crc kubenswrapper[4861]: I1003 15:00:00.219658 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1fa5fd87-24ff-4cec-9b8a-0a4882a9881d-secret-volume\") pod \"collect-profiles-29325060-jhmch\" (UID: \"1fa5fd87-24ff-4cec-9b8a-0a4882a9881d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325060-jhmch" Oct 03 15:00:00 crc kubenswrapper[4861]: I1003 15:00:00.320845 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wcgpd\" (UniqueName: \"kubernetes.io/projected/1fa5fd87-24ff-4cec-9b8a-0a4882a9881d-kube-api-access-wcgpd\") pod \"collect-profiles-29325060-jhmch\" (UID: \"1fa5fd87-24ff-4cec-9b8a-0a4882a9881d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325060-jhmch" Oct 03 15:00:00 crc kubenswrapper[4861]: I1003 15:00:00.320918 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1fa5fd87-24ff-4cec-9b8a-0a4882a9881d-secret-volume\") pod \"collect-profiles-29325060-jhmch\" (UID: \"1fa5fd87-24ff-4cec-9b8a-0a4882a9881d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325060-jhmch" Oct 03 15:00:00 crc kubenswrapper[4861]: I1003 15:00:00.321155 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1fa5fd87-24ff-4cec-9b8a-0a4882a9881d-config-volume\") pod \"collect-profiles-29325060-jhmch\" (UID: \"1fa5fd87-24ff-4cec-9b8a-0a4882a9881d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325060-jhmch" Oct 03 15:00:00 crc kubenswrapper[4861]: I1003 15:00:00.324328 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1fa5fd87-24ff-4cec-9b8a-0a4882a9881d-config-volume\") pod 
\"collect-profiles-29325060-jhmch\" (UID: \"1fa5fd87-24ff-4cec-9b8a-0a4882a9881d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325060-jhmch" Oct 03 15:00:00 crc kubenswrapper[4861]: I1003 15:00:00.333319 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1fa5fd87-24ff-4cec-9b8a-0a4882a9881d-secret-volume\") pod \"collect-profiles-29325060-jhmch\" (UID: \"1fa5fd87-24ff-4cec-9b8a-0a4882a9881d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325060-jhmch" Oct 03 15:00:00 crc kubenswrapper[4861]: I1003 15:00:00.345036 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wcgpd\" (UniqueName: \"kubernetes.io/projected/1fa5fd87-24ff-4cec-9b8a-0a4882a9881d-kube-api-access-wcgpd\") pod \"collect-profiles-29325060-jhmch\" (UID: \"1fa5fd87-24ff-4cec-9b8a-0a4882a9881d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325060-jhmch" Oct 03 15:00:00 crc kubenswrapper[4861]: I1003 15:00:00.507401 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29325060-jhmch" Oct 03 15:00:01 crc kubenswrapper[4861]: I1003 15:00:01.019222 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29325060-jhmch"] Oct 03 15:00:01 crc kubenswrapper[4861]: W1003 15:00:01.021337 4861 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1fa5fd87_24ff_4cec_9b8a_0a4882a9881d.slice/crio-55b414cb1d395b7b4efeb84425b3d42501e86fc95a48388c398f4645324a4cd0 WatchSource:0}: Error finding container 55b414cb1d395b7b4efeb84425b3d42501e86fc95a48388c398f4645324a4cd0: Status 404 returned error can't find the container with id 55b414cb1d395b7b4efeb84425b3d42501e86fc95a48388c398f4645324a4cd0 Oct 03 15:00:01 crc kubenswrapper[4861]: I1003 15:00:01.252666 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29325060-jhmch" event={"ID":"1fa5fd87-24ff-4cec-9b8a-0a4882a9881d","Type":"ContainerStarted","Data":"902b329578fa87198959e1995fe7bc5b2d66ba13245dd816c7bd532c98d93816"} Oct 03 15:00:01 crc kubenswrapper[4861]: I1003 15:00:01.252867 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29325060-jhmch" event={"ID":"1fa5fd87-24ff-4cec-9b8a-0a4882a9881d","Type":"ContainerStarted","Data":"55b414cb1d395b7b4efeb84425b3d42501e86fc95a48388c398f4645324a4cd0"} Oct 03 15:00:01 crc kubenswrapper[4861]: I1003 15:00:01.282215 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29325060-jhmch" podStartSLOduration=1.282195021 podStartE2EDuration="1.282195021s" podCreationTimestamp="2025-10-03 15:00:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:00:01.273387375 +0000 UTC m=+5315.271372462" watchObservedRunningTime="2025-10-03 15:00:01.282195021 +0000 UTC m=+5315.280180058" Oct 03 15:00:02 crc kubenswrapper[4861]: I1003 15:00:02.267938 4861 generic.go:334] "Generic (PLEG): container finished" podID="1fa5fd87-24ff-4cec-9b8a-0a4882a9881d" containerID="902b329578fa87198959e1995fe7bc5b2d66ba13245dd816c7bd532c98d93816" exitCode=0 Oct 03 15:00:02 crc kubenswrapper[4861]: I1003 15:00:02.268288 
4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29325060-jhmch" event={"ID":"1fa5fd87-24ff-4cec-9b8a-0a4882a9881d","Type":"ContainerDied","Data":"902b329578fa87198959e1995fe7bc5b2d66ba13245dd816c7bd532c98d93816"} Oct 03 15:00:02 crc kubenswrapper[4861]: I1003 15:00:02.333325 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-htc7c"] Oct 03 15:00:02 crc kubenswrapper[4861]: I1003 15:00:02.342325 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-htc7c" Oct 03 15:00:02 crc kubenswrapper[4861]: I1003 15:00:02.358042 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-htc7c"] Oct 03 15:00:02 crc kubenswrapper[4861]: I1003 15:00:02.410663 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f096ca24-2f40-47ea-a942-deefad336bea-catalog-content\") pod \"community-operators-htc7c\" (UID: \"f096ca24-2f40-47ea-a942-deefad336bea\") " pod="openshift-marketplace/community-operators-htc7c" Oct 03 15:00:02 crc kubenswrapper[4861]: I1003 15:00:02.410904 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vjqsx\" (UniqueName: \"kubernetes.io/projected/f096ca24-2f40-47ea-a942-deefad336bea-kube-api-access-vjqsx\") pod \"community-operators-htc7c\" (UID: \"f096ca24-2f40-47ea-a942-deefad336bea\") " pod="openshift-marketplace/community-operators-htc7c" Oct 03 15:00:02 crc kubenswrapper[4861]: I1003 15:00:02.412386 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f096ca24-2f40-47ea-a942-deefad336bea-utilities\") pod \"community-operators-htc7c\" (UID: \"f096ca24-2f40-47ea-a942-deefad336bea\") " pod="openshift-marketplace/community-operators-htc7c" Oct 03 15:00:02 crc kubenswrapper[4861]: I1003 15:00:02.514563 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f096ca24-2f40-47ea-a942-deefad336bea-utilities\") pod \"community-operators-htc7c\" (UID: \"f096ca24-2f40-47ea-a942-deefad336bea\") " pod="openshift-marketplace/community-operators-htc7c" Oct 03 15:00:02 crc kubenswrapper[4861]: I1003 15:00:02.514688 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f096ca24-2f40-47ea-a942-deefad336bea-catalog-content\") pod \"community-operators-htc7c\" (UID: \"f096ca24-2f40-47ea-a942-deefad336bea\") " pod="openshift-marketplace/community-operators-htc7c" Oct 03 15:00:02 crc kubenswrapper[4861]: I1003 15:00:02.514801 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vjqsx\" (UniqueName: \"kubernetes.io/projected/f096ca24-2f40-47ea-a942-deefad336bea-kube-api-access-vjqsx\") pod \"community-operators-htc7c\" (UID: \"f096ca24-2f40-47ea-a942-deefad336bea\") " pod="openshift-marketplace/community-operators-htc7c" Oct 03 15:00:02 crc kubenswrapper[4861]: I1003 15:00:02.515099 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f096ca24-2f40-47ea-a942-deefad336bea-utilities\") pod \"community-operators-htc7c\" (UID: 
\"f096ca24-2f40-47ea-a942-deefad336bea\") " pod="openshift-marketplace/community-operators-htc7c" Oct 03 15:00:02 crc kubenswrapper[4861]: I1003 15:00:02.515443 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f096ca24-2f40-47ea-a942-deefad336bea-catalog-content\") pod \"community-operators-htc7c\" (UID: \"f096ca24-2f40-47ea-a942-deefad336bea\") " pod="openshift-marketplace/community-operators-htc7c" Oct 03 15:00:02 crc kubenswrapper[4861]: I1003 15:00:02.535580 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vjqsx\" (UniqueName: \"kubernetes.io/projected/f096ca24-2f40-47ea-a942-deefad336bea-kube-api-access-vjqsx\") pod \"community-operators-htc7c\" (UID: \"f096ca24-2f40-47ea-a942-deefad336bea\") " pod="openshift-marketplace/community-operators-htc7c" Oct 03 15:00:02 crc kubenswrapper[4861]: I1003 15:00:02.664708 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-htc7c" Oct 03 15:00:03 crc kubenswrapper[4861]: I1003 15:00:03.302486 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-htc7c"] Oct 03 15:00:03 crc kubenswrapper[4861]: W1003 15:00:03.320363 4861 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf096ca24_2f40_47ea_a942_deefad336bea.slice/crio-3a2792f0c7dafb69413e0d466f87cee871bc5c514e15e405e31f3e18036013d5 WatchSource:0}: Error finding container 3a2792f0c7dafb69413e0d466f87cee871bc5c514e15e405e31f3e18036013d5: Status 404 returned error can't find the container with id 3a2792f0c7dafb69413e0d466f87cee871bc5c514e15e405e31f3e18036013d5 Oct 03 15:00:03 crc kubenswrapper[4861]: I1003 15:00:03.526207 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29325060-jhmch" Oct 03 15:00:03 crc kubenswrapper[4861]: I1003 15:00:03.543492 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wcgpd\" (UniqueName: \"kubernetes.io/projected/1fa5fd87-24ff-4cec-9b8a-0a4882a9881d-kube-api-access-wcgpd\") pod \"1fa5fd87-24ff-4cec-9b8a-0a4882a9881d\" (UID: \"1fa5fd87-24ff-4cec-9b8a-0a4882a9881d\") " Oct 03 15:00:03 crc kubenswrapper[4861]: I1003 15:00:03.543589 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1fa5fd87-24ff-4cec-9b8a-0a4882a9881d-secret-volume\") pod \"1fa5fd87-24ff-4cec-9b8a-0a4882a9881d\" (UID: \"1fa5fd87-24ff-4cec-9b8a-0a4882a9881d\") " Oct 03 15:00:03 crc kubenswrapper[4861]: I1003 15:00:03.543714 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1fa5fd87-24ff-4cec-9b8a-0a4882a9881d-config-volume\") pod \"1fa5fd87-24ff-4cec-9b8a-0a4882a9881d\" (UID: \"1fa5fd87-24ff-4cec-9b8a-0a4882a9881d\") " Oct 03 15:00:03 crc kubenswrapper[4861]: I1003 15:00:03.544745 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1fa5fd87-24ff-4cec-9b8a-0a4882a9881d-config-volume" (OuterVolumeSpecName: "config-volume") pod "1fa5fd87-24ff-4cec-9b8a-0a4882a9881d" (UID: "1fa5fd87-24ff-4cec-9b8a-0a4882a9881d"). InnerVolumeSpecName "config-volume". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:00:03 crc kubenswrapper[4861]: I1003 15:00:03.553905 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1fa5fd87-24ff-4cec-9b8a-0a4882a9881d-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "1fa5fd87-24ff-4cec-9b8a-0a4882a9881d" (UID: "1fa5fd87-24ff-4cec-9b8a-0a4882a9881d"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:00:03 crc kubenswrapper[4861]: I1003 15:00:03.555191 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1fa5fd87-24ff-4cec-9b8a-0a4882a9881d-kube-api-access-wcgpd" (OuterVolumeSpecName: "kube-api-access-wcgpd") pod "1fa5fd87-24ff-4cec-9b8a-0a4882a9881d" (UID: "1fa5fd87-24ff-4cec-9b8a-0a4882a9881d"). InnerVolumeSpecName "kube-api-access-wcgpd". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:00:03 crc kubenswrapper[4861]: I1003 15:00:03.645992 4861 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1fa5fd87-24ff-4cec-9b8a-0a4882a9881d-config-volume\") on node \"crc\" DevicePath \"\"" Oct 03 15:00:03 crc kubenswrapper[4861]: I1003 15:00:03.646020 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wcgpd\" (UniqueName: \"kubernetes.io/projected/1fa5fd87-24ff-4cec-9b8a-0a4882a9881d-kube-api-access-wcgpd\") on node \"crc\" DevicePath \"\"" Oct 03 15:00:03 crc kubenswrapper[4861]: I1003 15:00:03.646030 4861 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1fa5fd87-24ff-4cec-9b8a-0a4882a9881d-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 03 15:00:04 crc kubenswrapper[4861]: I1003 15:00:04.290918 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29325060-jhmch" event={"ID":"1fa5fd87-24ff-4cec-9b8a-0a4882a9881d","Type":"ContainerDied","Data":"55b414cb1d395b7b4efeb84425b3d42501e86fc95a48388c398f4645324a4cd0"} Oct 03 15:00:04 crc kubenswrapper[4861]: I1003 15:00:04.290981 4861 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="55b414cb1d395b7b4efeb84425b3d42501e86fc95a48388c398f4645324a4cd0" Oct 03 15:00:04 crc kubenswrapper[4861]: I1003 15:00:04.291070 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29325060-jhmch" Oct 03 15:00:04 crc kubenswrapper[4861]: I1003 15:00:04.316807 4861 generic.go:334] "Generic (PLEG): container finished" podID="f096ca24-2f40-47ea-a942-deefad336bea" containerID="1431015bd044e6e6c7670117aa34e265a54f5ea95f43af6710bf4f376cfb5d26" exitCode=0 Oct 03 15:00:04 crc kubenswrapper[4861]: I1003 15:00:04.316854 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-htc7c" event={"ID":"f096ca24-2f40-47ea-a942-deefad336bea","Type":"ContainerDied","Data":"1431015bd044e6e6c7670117aa34e265a54f5ea95f43af6710bf4f376cfb5d26"} Oct 03 15:00:04 crc kubenswrapper[4861]: I1003 15:00:04.316885 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-htc7c" event={"ID":"f096ca24-2f40-47ea-a942-deefad336bea","Type":"ContainerStarted","Data":"3a2792f0c7dafb69413e0d466f87cee871bc5c514e15e405e31f3e18036013d5"} Oct 03 15:00:04 crc kubenswrapper[4861]: I1003 15:00:04.325693 4861 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 03 15:00:04 crc kubenswrapper[4861]: I1003 15:00:04.386556 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29325015-5cw5b"] Oct 03 15:00:04 crc kubenswrapper[4861]: I1003 15:00:04.400741 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29325015-5cw5b"] Oct 03 15:00:04 crc kubenswrapper[4861]: I1003 15:00:04.691766 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="58941f0d-ab00-4963-8608-dbbe6047db92" path="/var/lib/kubelet/pods/58941f0d-ab00-4963-8608-dbbe6047db92/volumes" Oct 03 15:00:06 crc kubenswrapper[4861]: I1003 15:00:06.345166 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-htc7c" event={"ID":"f096ca24-2f40-47ea-a942-deefad336bea","Type":"ContainerStarted","Data":"6896c9cc2563c155b8ec747badfaf8c8ff755d1d984066d5bae727725c4cede8"} Oct 03 15:00:07 crc kubenswrapper[4861]: I1003 15:00:07.355056 4861 generic.go:334] "Generic (PLEG): container finished" podID="f096ca24-2f40-47ea-a942-deefad336bea" containerID="6896c9cc2563c155b8ec747badfaf8c8ff755d1d984066d5bae727725c4cede8" exitCode=0 Oct 03 15:00:07 crc kubenswrapper[4861]: I1003 15:00:07.355173 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-htc7c" event={"ID":"f096ca24-2f40-47ea-a942-deefad336bea","Type":"ContainerDied","Data":"6896c9cc2563c155b8ec747badfaf8c8ff755d1d984066d5bae727725c4cede8"} Oct 03 15:00:08 crc kubenswrapper[4861]: I1003 15:00:08.372191 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-htc7c" event={"ID":"f096ca24-2f40-47ea-a942-deefad336bea","Type":"ContainerStarted","Data":"670b3e97ad6a1c1746602feeae7f7c7e638a6f7b466577cc92f0092ffc83e497"} Oct 03 15:00:08 crc kubenswrapper[4861]: I1003 15:00:08.394199 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-htc7c" podStartSLOduration=2.854439105 podStartE2EDuration="6.394182702s" podCreationTimestamp="2025-10-03 15:00:02 +0000 UTC" firstStartedPulling="2025-10-03 15:00:04.325077982 +0000 UTC m=+5318.323063119" lastFinishedPulling="2025-10-03 15:00:07.864821639 +0000 UTC m=+5321.862806716" observedRunningTime="2025-10-03 15:00:08.390264738 +0000 
UTC m=+5322.388249795" watchObservedRunningTime="2025-10-03 15:00:08.394182702 +0000 UTC m=+5322.392167759" Oct 03 15:00:12 crc kubenswrapper[4861]: I1003 15:00:12.665780 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-htc7c" Oct 03 15:00:12 crc kubenswrapper[4861]: I1003 15:00:12.666571 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-htc7c" Oct 03 15:00:12 crc kubenswrapper[4861]: I1003 15:00:12.715984 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-htc7c" Oct 03 15:00:13 crc kubenswrapper[4861]: I1003 15:00:13.534385 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-htc7c" Oct 03 15:00:13 crc kubenswrapper[4861]: I1003 15:00:13.612254 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-htc7c"] Oct 03 15:00:15 crc kubenswrapper[4861]: I1003 15:00:15.455674 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-htc7c" podUID="f096ca24-2f40-47ea-a942-deefad336bea" containerName="registry-server" containerID="cri-o://670b3e97ad6a1c1746602feeae7f7c7e638a6f7b466577cc92f0092ffc83e497" gracePeriod=2 Oct 03 15:00:15 crc kubenswrapper[4861]: I1003 15:00:15.994354 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-htc7c" Oct 03 15:00:16 crc kubenswrapper[4861]: I1003 15:00:16.195451 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vjqsx\" (UniqueName: \"kubernetes.io/projected/f096ca24-2f40-47ea-a942-deefad336bea-kube-api-access-vjqsx\") pod \"f096ca24-2f40-47ea-a942-deefad336bea\" (UID: \"f096ca24-2f40-47ea-a942-deefad336bea\") " Oct 03 15:00:16 crc kubenswrapper[4861]: I1003 15:00:16.195521 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f096ca24-2f40-47ea-a942-deefad336bea-catalog-content\") pod \"f096ca24-2f40-47ea-a942-deefad336bea\" (UID: \"f096ca24-2f40-47ea-a942-deefad336bea\") " Oct 03 15:00:16 crc kubenswrapper[4861]: I1003 15:00:16.195591 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f096ca24-2f40-47ea-a942-deefad336bea-utilities\") pod \"f096ca24-2f40-47ea-a942-deefad336bea\" (UID: \"f096ca24-2f40-47ea-a942-deefad336bea\") " Oct 03 15:00:16 crc kubenswrapper[4861]: I1003 15:00:16.197177 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f096ca24-2f40-47ea-a942-deefad336bea-utilities" (OuterVolumeSpecName: "utilities") pod "f096ca24-2f40-47ea-a942-deefad336bea" (UID: "f096ca24-2f40-47ea-a942-deefad336bea"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 15:00:16 crc kubenswrapper[4861]: I1003 15:00:16.202761 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f096ca24-2f40-47ea-a942-deefad336bea-kube-api-access-vjqsx" (OuterVolumeSpecName: "kube-api-access-vjqsx") pod "f096ca24-2f40-47ea-a942-deefad336bea" (UID: "f096ca24-2f40-47ea-a942-deefad336bea"). InnerVolumeSpecName "kube-api-access-vjqsx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:00:16 crc kubenswrapper[4861]: I1003 15:00:16.302511 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vjqsx\" (UniqueName: \"kubernetes.io/projected/f096ca24-2f40-47ea-a942-deefad336bea-kube-api-access-vjqsx\") on node \"crc\" DevicePath \"\"" Oct 03 15:00:16 crc kubenswrapper[4861]: I1003 15:00:16.302554 4861 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f096ca24-2f40-47ea-a942-deefad336bea-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 15:00:16 crc kubenswrapper[4861]: I1003 15:00:16.451773 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f096ca24-2f40-47ea-a942-deefad336bea-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f096ca24-2f40-47ea-a942-deefad336bea" (UID: "f096ca24-2f40-47ea-a942-deefad336bea"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 15:00:16 crc kubenswrapper[4861]: I1003 15:00:16.467702 4861 generic.go:334] "Generic (PLEG): container finished" podID="f096ca24-2f40-47ea-a942-deefad336bea" containerID="670b3e97ad6a1c1746602feeae7f7c7e638a6f7b466577cc92f0092ffc83e497" exitCode=0 Oct 03 15:00:16 crc kubenswrapper[4861]: I1003 15:00:16.467769 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-htc7c" Oct 03 15:00:16 crc kubenswrapper[4861]: I1003 15:00:16.467788 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-htc7c" event={"ID":"f096ca24-2f40-47ea-a942-deefad336bea","Type":"ContainerDied","Data":"670b3e97ad6a1c1746602feeae7f7c7e638a6f7b466577cc92f0092ffc83e497"} Oct 03 15:00:16 crc kubenswrapper[4861]: I1003 15:00:16.468102 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-htc7c" event={"ID":"f096ca24-2f40-47ea-a942-deefad336bea","Type":"ContainerDied","Data":"3a2792f0c7dafb69413e0d466f87cee871bc5c514e15e405e31f3e18036013d5"} Oct 03 15:00:16 crc kubenswrapper[4861]: I1003 15:00:16.468121 4861 scope.go:117] "RemoveContainer" containerID="670b3e97ad6a1c1746602feeae7f7c7e638a6f7b466577cc92f0092ffc83e497" Oct 03 15:00:16 crc kubenswrapper[4861]: I1003 15:00:16.484468 4861 scope.go:117] "RemoveContainer" containerID="6896c9cc2563c155b8ec747badfaf8c8ff755d1d984066d5bae727725c4cede8" Oct 03 15:00:16 crc kubenswrapper[4861]: I1003 15:00:16.510579 4861 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f096ca24-2f40-47ea-a942-deefad336bea-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 15:00:16 crc kubenswrapper[4861]: I1003 15:00:16.514254 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-htc7c"] Oct 03 15:00:16 crc kubenswrapper[4861]: I1003 15:00:16.521021 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-htc7c"] Oct 03 15:00:16 crc kubenswrapper[4861]: I1003 15:00:16.524376 4861 scope.go:117] "RemoveContainer" containerID="1431015bd044e6e6c7670117aa34e265a54f5ea95f43af6710bf4f376cfb5d26" Oct 03 15:00:16 crc kubenswrapper[4861]: I1003 15:00:16.567993 4861 scope.go:117] "RemoveContainer" containerID="670b3e97ad6a1c1746602feeae7f7c7e638a6f7b466577cc92f0092ffc83e497" Oct 03 15:00:16 crc kubenswrapper[4861]: E1003 15:00:16.569015 4861 log.go:32] 
"ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"670b3e97ad6a1c1746602feeae7f7c7e638a6f7b466577cc92f0092ffc83e497\": container with ID starting with 670b3e97ad6a1c1746602feeae7f7c7e638a6f7b466577cc92f0092ffc83e497 not found: ID does not exist" containerID="670b3e97ad6a1c1746602feeae7f7c7e638a6f7b466577cc92f0092ffc83e497" Oct 03 15:00:16 crc kubenswrapper[4861]: I1003 15:00:16.569099 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"670b3e97ad6a1c1746602feeae7f7c7e638a6f7b466577cc92f0092ffc83e497"} err="failed to get container status \"670b3e97ad6a1c1746602feeae7f7c7e638a6f7b466577cc92f0092ffc83e497\": rpc error: code = NotFound desc = could not find container \"670b3e97ad6a1c1746602feeae7f7c7e638a6f7b466577cc92f0092ffc83e497\": container with ID starting with 670b3e97ad6a1c1746602feeae7f7c7e638a6f7b466577cc92f0092ffc83e497 not found: ID does not exist" Oct 03 15:00:16 crc kubenswrapper[4861]: I1003 15:00:16.569150 4861 scope.go:117] "RemoveContainer" containerID="6896c9cc2563c155b8ec747badfaf8c8ff755d1d984066d5bae727725c4cede8" Oct 03 15:00:16 crc kubenswrapper[4861]: E1003 15:00:16.569711 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6896c9cc2563c155b8ec747badfaf8c8ff755d1d984066d5bae727725c4cede8\": container with ID starting with 6896c9cc2563c155b8ec747badfaf8c8ff755d1d984066d5bae727725c4cede8 not found: ID does not exist" containerID="6896c9cc2563c155b8ec747badfaf8c8ff755d1d984066d5bae727725c4cede8" Oct 03 15:00:16 crc kubenswrapper[4861]: I1003 15:00:16.569868 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6896c9cc2563c155b8ec747badfaf8c8ff755d1d984066d5bae727725c4cede8"} err="failed to get container status \"6896c9cc2563c155b8ec747badfaf8c8ff755d1d984066d5bae727725c4cede8\": rpc error: code = NotFound desc = could not find container \"6896c9cc2563c155b8ec747badfaf8c8ff755d1d984066d5bae727725c4cede8\": container with ID starting with 6896c9cc2563c155b8ec747badfaf8c8ff755d1d984066d5bae727725c4cede8 not found: ID does not exist" Oct 03 15:00:16 crc kubenswrapper[4861]: I1003 15:00:16.569969 4861 scope.go:117] "RemoveContainer" containerID="1431015bd044e6e6c7670117aa34e265a54f5ea95f43af6710bf4f376cfb5d26" Oct 03 15:00:16 crc kubenswrapper[4861]: E1003 15:00:16.571495 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1431015bd044e6e6c7670117aa34e265a54f5ea95f43af6710bf4f376cfb5d26\": container with ID starting with 1431015bd044e6e6c7670117aa34e265a54f5ea95f43af6710bf4f376cfb5d26 not found: ID does not exist" containerID="1431015bd044e6e6c7670117aa34e265a54f5ea95f43af6710bf4f376cfb5d26" Oct 03 15:00:16 crc kubenswrapper[4861]: I1003 15:00:16.571528 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1431015bd044e6e6c7670117aa34e265a54f5ea95f43af6710bf4f376cfb5d26"} err="failed to get container status \"1431015bd044e6e6c7670117aa34e265a54f5ea95f43af6710bf4f376cfb5d26\": rpc error: code = NotFound desc = could not find container \"1431015bd044e6e6c7670117aa34e265a54f5ea95f43af6710bf4f376cfb5d26\": container with ID starting with 1431015bd044e6e6c7670117aa34e265a54f5ea95f43af6710bf4f376cfb5d26 not found: ID does not exist" Oct 03 15:00:16 crc kubenswrapper[4861]: I1003 15:00:16.699975 4861 kubelet_volumes.go:163] "Cleaned 
up orphaned pod volumes dir" podUID="f096ca24-2f40-47ea-a942-deefad336bea" path="/var/lib/kubelet/pods/f096ca24-2f40-47ea-a942-deefad336bea/volumes" Oct 03 15:00:20 crc kubenswrapper[4861]: I1003 15:00:20.799044 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-4tdkf/must-gather-xlw45"] Oct 03 15:00:20 crc kubenswrapper[4861]: E1003 15:00:20.800785 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f096ca24-2f40-47ea-a942-deefad336bea" containerName="registry-server" Oct 03 15:00:20 crc kubenswrapper[4861]: I1003 15:00:20.800830 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="f096ca24-2f40-47ea-a942-deefad336bea" containerName="registry-server" Oct 03 15:00:20 crc kubenswrapper[4861]: E1003 15:00:20.800895 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f096ca24-2f40-47ea-a942-deefad336bea" containerName="extract-content" Oct 03 15:00:20 crc kubenswrapper[4861]: I1003 15:00:20.800904 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="f096ca24-2f40-47ea-a942-deefad336bea" containerName="extract-content" Oct 03 15:00:20 crc kubenswrapper[4861]: E1003 15:00:20.800937 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1fa5fd87-24ff-4cec-9b8a-0a4882a9881d" containerName="collect-profiles" Oct 03 15:00:20 crc kubenswrapper[4861]: I1003 15:00:20.800946 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="1fa5fd87-24ff-4cec-9b8a-0a4882a9881d" containerName="collect-profiles" Oct 03 15:00:20 crc kubenswrapper[4861]: E1003 15:00:20.800979 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f096ca24-2f40-47ea-a942-deefad336bea" containerName="extract-utilities" Oct 03 15:00:20 crc kubenswrapper[4861]: I1003 15:00:20.800987 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="f096ca24-2f40-47ea-a942-deefad336bea" containerName="extract-utilities" Oct 03 15:00:20 crc kubenswrapper[4861]: I1003 15:00:20.801420 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="f096ca24-2f40-47ea-a942-deefad336bea" containerName="registry-server" Oct 03 15:00:20 crc kubenswrapper[4861]: I1003 15:00:20.801469 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="1fa5fd87-24ff-4cec-9b8a-0a4882a9881d" containerName="collect-profiles" Oct 03 15:00:20 crc kubenswrapper[4861]: I1003 15:00:20.803015 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-4tdkf/must-gather-xlw45" Oct 03 15:00:20 crc kubenswrapper[4861]: I1003 15:00:20.810724 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-4tdkf"/"openshift-service-ca.crt" Oct 03 15:00:20 crc kubenswrapper[4861]: I1003 15:00:20.810932 4861 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-4tdkf"/"kube-root-ca.crt" Oct 03 15:00:20 crc kubenswrapper[4861]: I1003 15:00:20.811288 4861 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-4tdkf"/"default-dockercfg-6tvp5" Oct 03 15:00:20 crc kubenswrapper[4861]: I1003 15:00:20.827165 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-4tdkf/must-gather-xlw45"] Oct 03 15:00:20 crc kubenswrapper[4861]: I1003 15:00:20.914759 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8dn7s\" (UniqueName: \"kubernetes.io/projected/5c7fbe42-081b-46c6-a12f-ce1c9e30020d-kube-api-access-8dn7s\") pod \"must-gather-xlw45\" (UID: \"5c7fbe42-081b-46c6-a12f-ce1c9e30020d\") " pod="openshift-must-gather-4tdkf/must-gather-xlw45" Oct 03 15:00:20 crc kubenswrapper[4861]: I1003 15:00:20.915033 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/5c7fbe42-081b-46c6-a12f-ce1c9e30020d-must-gather-output\") pod \"must-gather-xlw45\" (UID: \"5c7fbe42-081b-46c6-a12f-ce1c9e30020d\") " pod="openshift-must-gather-4tdkf/must-gather-xlw45" Oct 03 15:00:21 crc kubenswrapper[4861]: I1003 15:00:21.016242 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/5c7fbe42-081b-46c6-a12f-ce1c9e30020d-must-gather-output\") pod \"must-gather-xlw45\" (UID: \"5c7fbe42-081b-46c6-a12f-ce1c9e30020d\") " pod="openshift-must-gather-4tdkf/must-gather-xlw45" Oct 03 15:00:21 crc kubenswrapper[4861]: I1003 15:00:21.016318 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8dn7s\" (UniqueName: \"kubernetes.io/projected/5c7fbe42-081b-46c6-a12f-ce1c9e30020d-kube-api-access-8dn7s\") pod \"must-gather-xlw45\" (UID: \"5c7fbe42-081b-46c6-a12f-ce1c9e30020d\") " pod="openshift-must-gather-4tdkf/must-gather-xlw45" Oct 03 15:00:21 crc kubenswrapper[4861]: I1003 15:00:21.016750 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/5c7fbe42-081b-46c6-a12f-ce1c9e30020d-must-gather-output\") pod \"must-gather-xlw45\" (UID: \"5c7fbe42-081b-46c6-a12f-ce1c9e30020d\") " pod="openshift-must-gather-4tdkf/must-gather-xlw45" Oct 03 15:00:21 crc kubenswrapper[4861]: I1003 15:00:21.034907 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8dn7s\" (UniqueName: \"kubernetes.io/projected/5c7fbe42-081b-46c6-a12f-ce1c9e30020d-kube-api-access-8dn7s\") pod \"must-gather-xlw45\" (UID: \"5c7fbe42-081b-46c6-a12f-ce1c9e30020d\") " pod="openshift-must-gather-4tdkf/must-gather-xlw45" Oct 03 15:00:21 crc kubenswrapper[4861]: I1003 15:00:21.134824 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-4tdkf/must-gather-xlw45" Oct 03 15:00:21 crc kubenswrapper[4861]: I1003 15:00:21.497321 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-4tdkf/must-gather-xlw45"] Oct 03 15:00:21 crc kubenswrapper[4861]: I1003 15:00:21.543802 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-4tdkf/must-gather-xlw45" event={"ID":"5c7fbe42-081b-46c6-a12f-ce1c9e30020d","Type":"ContainerStarted","Data":"0ab6a00ea2fbbb41f2535a49f125601c2f9aa2a30d064776b37a8447abcf499d"} Oct 03 15:00:22 crc kubenswrapper[4861]: I1003 15:00:22.554775 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-4tdkf/must-gather-xlw45" event={"ID":"5c7fbe42-081b-46c6-a12f-ce1c9e30020d","Type":"ContainerStarted","Data":"8dff02f17b673be381071e31f2417a381dd957315bf7dd4fbe9aaab4559f7247"} Oct 03 15:00:22 crc kubenswrapper[4861]: I1003 15:00:22.555245 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-4tdkf/must-gather-xlw45" event={"ID":"5c7fbe42-081b-46c6-a12f-ce1c9e30020d","Type":"ContainerStarted","Data":"7a16c298bf544221597034843c0445197f8e8c270b6635faa09ce7669607e616"} Oct 03 15:00:22 crc kubenswrapper[4861]: I1003 15:00:22.571307 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-4tdkf/must-gather-xlw45" podStartSLOduration=2.571222054 podStartE2EDuration="2.571222054s" podCreationTimestamp="2025-10-03 15:00:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:00:22.567943346 +0000 UTC m=+5336.565928393" watchObservedRunningTime="2025-10-03 15:00:22.571222054 +0000 UTC m=+5336.569207091" Oct 03 15:00:25 crc kubenswrapper[4861]: I1003 15:00:25.891682 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-4tdkf/crc-debug-8b768"] Oct 03 15:00:25 crc kubenswrapper[4861]: I1003 15:00:25.893449 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-4tdkf/crc-debug-8b768" Oct 03 15:00:26 crc kubenswrapper[4861]: I1003 15:00:26.003521 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b09bbf98-9e9c-4b1b-9245-ed6b5fc3d69b-host\") pod \"crc-debug-8b768\" (UID: \"b09bbf98-9e9c-4b1b-9245-ed6b5fc3d69b\") " pod="openshift-must-gather-4tdkf/crc-debug-8b768" Oct 03 15:00:26 crc kubenswrapper[4861]: I1003 15:00:26.003757 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4n4v7\" (UniqueName: \"kubernetes.io/projected/b09bbf98-9e9c-4b1b-9245-ed6b5fc3d69b-kube-api-access-4n4v7\") pod \"crc-debug-8b768\" (UID: \"b09bbf98-9e9c-4b1b-9245-ed6b5fc3d69b\") " pod="openshift-must-gather-4tdkf/crc-debug-8b768" Oct 03 15:00:26 crc kubenswrapper[4861]: I1003 15:00:26.105849 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4n4v7\" (UniqueName: \"kubernetes.io/projected/b09bbf98-9e9c-4b1b-9245-ed6b5fc3d69b-kube-api-access-4n4v7\") pod \"crc-debug-8b768\" (UID: \"b09bbf98-9e9c-4b1b-9245-ed6b5fc3d69b\") " pod="openshift-must-gather-4tdkf/crc-debug-8b768" Oct 03 15:00:26 crc kubenswrapper[4861]: I1003 15:00:26.105991 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b09bbf98-9e9c-4b1b-9245-ed6b5fc3d69b-host\") pod \"crc-debug-8b768\" (UID: \"b09bbf98-9e9c-4b1b-9245-ed6b5fc3d69b\") " pod="openshift-must-gather-4tdkf/crc-debug-8b768" Oct 03 15:00:26 crc kubenswrapper[4861]: I1003 15:00:26.106088 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b09bbf98-9e9c-4b1b-9245-ed6b5fc3d69b-host\") pod \"crc-debug-8b768\" (UID: \"b09bbf98-9e9c-4b1b-9245-ed6b5fc3d69b\") " pod="openshift-must-gather-4tdkf/crc-debug-8b768" Oct 03 15:00:26 crc kubenswrapper[4861]: I1003 15:00:26.134129 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4n4v7\" (UniqueName: \"kubernetes.io/projected/b09bbf98-9e9c-4b1b-9245-ed6b5fc3d69b-kube-api-access-4n4v7\") pod \"crc-debug-8b768\" (UID: \"b09bbf98-9e9c-4b1b-9245-ed6b5fc3d69b\") " pod="openshift-must-gather-4tdkf/crc-debug-8b768" Oct 03 15:00:26 crc kubenswrapper[4861]: I1003 15:00:26.215917 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-4tdkf/crc-debug-8b768" Oct 03 15:00:26 crc kubenswrapper[4861]: W1003 15:00:26.281980 4861 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb09bbf98_9e9c_4b1b_9245_ed6b5fc3d69b.slice/crio-c85bee1db953fe72aceb011f73eb544d3265dd0c3efbd580442c0f0c7fe3a21d WatchSource:0}: Error finding container c85bee1db953fe72aceb011f73eb544d3265dd0c3efbd580442c0f0c7fe3a21d: Status 404 returned error can't find the container with id c85bee1db953fe72aceb011f73eb544d3265dd0c3efbd580442c0f0c7fe3a21d Oct 03 15:00:26 crc kubenswrapper[4861]: I1003 15:00:26.585359 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-4tdkf/crc-debug-8b768" event={"ID":"b09bbf98-9e9c-4b1b-9245-ed6b5fc3d69b","Type":"ContainerStarted","Data":"42b48bad9cba956d1eba9d94fbb4e201365d5279204b1341c523017a087a5da7"} Oct 03 15:00:26 crc kubenswrapper[4861]: I1003 15:00:26.585637 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-4tdkf/crc-debug-8b768" event={"ID":"b09bbf98-9e9c-4b1b-9245-ed6b5fc3d69b","Type":"ContainerStarted","Data":"c85bee1db953fe72aceb011f73eb544d3265dd0c3efbd580442c0f0c7fe3a21d"} Oct 03 15:00:26 crc kubenswrapper[4861]: I1003 15:00:26.605318 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-4tdkf/crc-debug-8b768" podStartSLOduration=1.605302249 podStartE2EDuration="1.605302249s" podCreationTimestamp="2025-10-03 15:00:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:00:26.600885351 +0000 UTC m=+5340.598870398" watchObservedRunningTime="2025-10-03 15:00:26.605302249 +0000 UTC m=+5340.603287296" Oct 03 15:01:00 crc kubenswrapper[4861]: I1003 15:01:00.146826 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-cron-29325061-8zrvn"] Oct 03 15:01:00 crc kubenswrapper[4861]: I1003 15:01:00.148336 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29325061-8zrvn" Oct 03 15:01:00 crc kubenswrapper[4861]: I1003 15:01:00.164282 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29325061-8zrvn"] Oct 03 15:01:00 crc kubenswrapper[4861]: I1003 15:01:00.242727 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/34b9798c-bdf0-4869-ba43-75c3c3261258-fernet-keys\") pod \"keystone-cron-29325061-8zrvn\" (UID: \"34b9798c-bdf0-4869-ba43-75c3c3261258\") " pod="openstack/keystone-cron-29325061-8zrvn" Oct 03 15:01:00 crc kubenswrapper[4861]: I1003 15:01:00.243020 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/34b9798c-bdf0-4869-ba43-75c3c3261258-config-data\") pod \"keystone-cron-29325061-8zrvn\" (UID: \"34b9798c-bdf0-4869-ba43-75c3c3261258\") " pod="openstack/keystone-cron-29325061-8zrvn" Oct 03 15:01:00 crc kubenswrapper[4861]: I1003 15:01:00.243094 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/34b9798c-bdf0-4869-ba43-75c3c3261258-combined-ca-bundle\") pod \"keystone-cron-29325061-8zrvn\" (UID: \"34b9798c-bdf0-4869-ba43-75c3c3261258\") " pod="openstack/keystone-cron-29325061-8zrvn" Oct 03 15:01:00 crc kubenswrapper[4861]: I1003 15:01:00.243130 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k79tc\" (UniqueName: \"kubernetes.io/projected/34b9798c-bdf0-4869-ba43-75c3c3261258-kube-api-access-k79tc\") pod \"keystone-cron-29325061-8zrvn\" (UID: \"34b9798c-bdf0-4869-ba43-75c3c3261258\") " pod="openstack/keystone-cron-29325061-8zrvn" Oct 03 15:01:00 crc kubenswrapper[4861]: I1003 15:01:00.344997 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/34b9798c-bdf0-4869-ba43-75c3c3261258-fernet-keys\") pod \"keystone-cron-29325061-8zrvn\" (UID: \"34b9798c-bdf0-4869-ba43-75c3c3261258\") " pod="openstack/keystone-cron-29325061-8zrvn" Oct 03 15:01:00 crc kubenswrapper[4861]: I1003 15:01:00.345115 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/34b9798c-bdf0-4869-ba43-75c3c3261258-config-data\") pod \"keystone-cron-29325061-8zrvn\" (UID: \"34b9798c-bdf0-4869-ba43-75c3c3261258\") " pod="openstack/keystone-cron-29325061-8zrvn" Oct 03 15:01:00 crc kubenswrapper[4861]: I1003 15:01:00.345147 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/34b9798c-bdf0-4869-ba43-75c3c3261258-combined-ca-bundle\") pod \"keystone-cron-29325061-8zrvn\" (UID: \"34b9798c-bdf0-4869-ba43-75c3c3261258\") " pod="openstack/keystone-cron-29325061-8zrvn" Oct 03 15:01:00 crc kubenswrapper[4861]: I1003 15:01:00.345178 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k79tc\" (UniqueName: \"kubernetes.io/projected/34b9798c-bdf0-4869-ba43-75c3c3261258-kube-api-access-k79tc\") pod \"keystone-cron-29325061-8zrvn\" (UID: \"34b9798c-bdf0-4869-ba43-75c3c3261258\") " pod="openstack/keystone-cron-29325061-8zrvn" Oct 03 15:01:00 crc kubenswrapper[4861]: I1003 15:01:00.352002 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/34b9798c-bdf0-4869-ba43-75c3c3261258-fernet-keys\") pod \"keystone-cron-29325061-8zrvn\" (UID: \"34b9798c-bdf0-4869-ba43-75c3c3261258\") " pod="openstack/keystone-cron-29325061-8zrvn" Oct 03 15:01:00 crc kubenswrapper[4861]: I1003 15:01:00.352554 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/34b9798c-bdf0-4869-ba43-75c3c3261258-config-data\") pod \"keystone-cron-29325061-8zrvn\" (UID: \"34b9798c-bdf0-4869-ba43-75c3c3261258\") " pod="openstack/keystone-cron-29325061-8zrvn" Oct 03 15:01:00 crc kubenswrapper[4861]: I1003 15:01:00.366716 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/34b9798c-bdf0-4869-ba43-75c3c3261258-combined-ca-bundle\") pod \"keystone-cron-29325061-8zrvn\" (UID: \"34b9798c-bdf0-4869-ba43-75c3c3261258\") " pod="openstack/keystone-cron-29325061-8zrvn" Oct 03 15:01:00 crc kubenswrapper[4861]: I1003 15:01:00.370684 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k79tc\" (UniqueName: \"kubernetes.io/projected/34b9798c-bdf0-4869-ba43-75c3c3261258-kube-api-access-k79tc\") pod \"keystone-cron-29325061-8zrvn\" (UID: \"34b9798c-bdf0-4869-ba43-75c3c3261258\") " pod="openstack/keystone-cron-29325061-8zrvn" Oct 03 15:01:00 crc kubenswrapper[4861]: I1003 15:01:00.512625 4861 scope.go:117] "RemoveContainer" containerID="a084be5b16415ec0457f508cbb3de9d68399fe163dfd909171e3d8b1bec10e99" Oct 03 15:01:00 crc kubenswrapper[4861]: I1003 15:01:00.523010 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29325061-8zrvn" Oct 03 15:01:01 crc kubenswrapper[4861]: W1003 15:01:01.046196 4861 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod34b9798c_bdf0_4869_ba43_75c3c3261258.slice/crio-6461d8b04cc45e8ebe97ad92c243d4d2a633d0e2e877c7899355775fce97deac WatchSource:0}: Error finding container 6461d8b04cc45e8ebe97ad92c243d4d2a633d0e2e877c7899355775fce97deac: Status 404 returned error can't find the container with id 6461d8b04cc45e8ebe97ad92c243d4d2a633d0e2e877c7899355775fce97deac Oct 03 15:01:01 crc kubenswrapper[4861]: I1003 15:01:01.046616 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29325061-8zrvn"] Oct 03 15:01:01 crc kubenswrapper[4861]: I1003 15:01:01.897415 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29325061-8zrvn" event={"ID":"34b9798c-bdf0-4869-ba43-75c3c3261258","Type":"ContainerStarted","Data":"0c6f3678757dd927c97d984fefbf9bd9ebd80efccb72f78ea0eb2491e7fcc28d"} Oct 03 15:01:01 crc kubenswrapper[4861]: I1003 15:01:01.897815 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29325061-8zrvn" event={"ID":"34b9798c-bdf0-4869-ba43-75c3c3261258","Type":"ContainerStarted","Data":"6461d8b04cc45e8ebe97ad92c243d4d2a633d0e2e877c7899355775fce97deac"} Oct 03 15:01:01 crc kubenswrapper[4861]: I1003 15:01:01.916819 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-cron-29325061-8zrvn" podStartSLOduration=1.9168003260000002 podStartE2EDuration="1.916800326s" podCreationTimestamp="2025-10-03 15:01:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 
15:01:01.914682199 +0000 UTC m=+5375.912667246" watchObservedRunningTime="2025-10-03 15:01:01.916800326 +0000 UTC m=+5375.914785373" Oct 03 15:01:05 crc kubenswrapper[4861]: I1003 15:01:05.928892 4861 generic.go:334] "Generic (PLEG): container finished" podID="34b9798c-bdf0-4869-ba43-75c3c3261258" containerID="0c6f3678757dd927c97d984fefbf9bd9ebd80efccb72f78ea0eb2491e7fcc28d" exitCode=0 Oct 03 15:01:05 crc kubenswrapper[4861]: I1003 15:01:05.929336 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29325061-8zrvn" event={"ID":"34b9798c-bdf0-4869-ba43-75c3c3261258","Type":"ContainerDied","Data":"0c6f3678757dd927c97d984fefbf9bd9ebd80efccb72f78ea0eb2491e7fcc28d"} Oct 03 15:01:07 crc kubenswrapper[4861]: I1003 15:01:07.359665 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29325061-8zrvn" Oct 03 15:01:07 crc kubenswrapper[4861]: I1003 15:01:07.498261 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k79tc\" (UniqueName: \"kubernetes.io/projected/34b9798c-bdf0-4869-ba43-75c3c3261258-kube-api-access-k79tc\") pod \"34b9798c-bdf0-4869-ba43-75c3c3261258\" (UID: \"34b9798c-bdf0-4869-ba43-75c3c3261258\") " Oct 03 15:01:07 crc kubenswrapper[4861]: I1003 15:01:07.498313 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/34b9798c-bdf0-4869-ba43-75c3c3261258-config-data\") pod \"34b9798c-bdf0-4869-ba43-75c3c3261258\" (UID: \"34b9798c-bdf0-4869-ba43-75c3c3261258\") " Oct 03 15:01:07 crc kubenswrapper[4861]: I1003 15:01:07.498480 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/34b9798c-bdf0-4869-ba43-75c3c3261258-fernet-keys\") pod \"34b9798c-bdf0-4869-ba43-75c3c3261258\" (UID: \"34b9798c-bdf0-4869-ba43-75c3c3261258\") " Oct 03 15:01:07 crc kubenswrapper[4861]: I1003 15:01:07.498595 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/34b9798c-bdf0-4869-ba43-75c3c3261258-combined-ca-bundle\") pod \"34b9798c-bdf0-4869-ba43-75c3c3261258\" (UID: \"34b9798c-bdf0-4869-ba43-75c3c3261258\") " Oct 03 15:01:07 crc kubenswrapper[4861]: I1003 15:01:07.504505 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/34b9798c-bdf0-4869-ba43-75c3c3261258-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "34b9798c-bdf0-4869-ba43-75c3c3261258" (UID: "34b9798c-bdf0-4869-ba43-75c3c3261258"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:01:07 crc kubenswrapper[4861]: I1003 15:01:07.509724 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/34b9798c-bdf0-4869-ba43-75c3c3261258-kube-api-access-k79tc" (OuterVolumeSpecName: "kube-api-access-k79tc") pod "34b9798c-bdf0-4869-ba43-75c3c3261258" (UID: "34b9798c-bdf0-4869-ba43-75c3c3261258"). InnerVolumeSpecName "kube-api-access-k79tc". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:01:07 crc kubenswrapper[4861]: I1003 15:01:07.567514 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/34b9798c-bdf0-4869-ba43-75c3c3261258-config-data" (OuterVolumeSpecName: "config-data") pod "34b9798c-bdf0-4869-ba43-75c3c3261258" (UID: "34b9798c-bdf0-4869-ba43-75c3c3261258"). 
InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:01:07 crc kubenswrapper[4861]: I1003 15:01:07.586689 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/34b9798c-bdf0-4869-ba43-75c3c3261258-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "34b9798c-bdf0-4869-ba43-75c3c3261258" (UID: "34b9798c-bdf0-4869-ba43-75c3c3261258"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:01:07 crc kubenswrapper[4861]: I1003 15:01:07.600445 4861 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/34b9798c-bdf0-4869-ba43-75c3c3261258-fernet-keys\") on node \"crc\" DevicePath \"\"" Oct 03 15:01:07 crc kubenswrapper[4861]: I1003 15:01:07.600478 4861 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/34b9798c-bdf0-4869-ba43-75c3c3261258-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 15:01:07 crc kubenswrapper[4861]: I1003 15:01:07.600495 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k79tc\" (UniqueName: \"kubernetes.io/projected/34b9798c-bdf0-4869-ba43-75c3c3261258-kube-api-access-k79tc\") on node \"crc\" DevicePath \"\"" Oct 03 15:01:07 crc kubenswrapper[4861]: I1003 15:01:07.600508 4861 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/34b9798c-bdf0-4869-ba43-75c3c3261258-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 15:01:07 crc kubenswrapper[4861]: I1003 15:01:07.961756 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29325061-8zrvn" event={"ID":"34b9798c-bdf0-4869-ba43-75c3c3261258","Type":"ContainerDied","Data":"6461d8b04cc45e8ebe97ad92c243d4d2a633d0e2e877c7899355775fce97deac"} Oct 03 15:01:07 crc kubenswrapper[4861]: I1003 15:01:07.961970 4861 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6461d8b04cc45e8ebe97ad92c243d4d2a633d0e2e877c7899355775fce97deac" Oct 03 15:01:07 crc kubenswrapper[4861]: I1003 15:01:07.962044 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29325061-8zrvn" Oct 03 15:01:16 crc kubenswrapper[4861]: I1003 15:01:16.151990 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-twpbb"] Oct 03 15:01:16 crc kubenswrapper[4861]: E1003 15:01:16.154697 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="34b9798c-bdf0-4869-ba43-75c3c3261258" containerName="keystone-cron" Oct 03 15:01:16 crc kubenswrapper[4861]: I1003 15:01:16.154713 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="34b9798c-bdf0-4869-ba43-75c3c3261258" containerName="keystone-cron" Oct 03 15:01:16 crc kubenswrapper[4861]: I1003 15:01:16.154900 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="34b9798c-bdf0-4869-ba43-75c3c3261258" containerName="keystone-cron" Oct 03 15:01:16 crc kubenswrapper[4861]: I1003 15:01:16.156116 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-twpbb" Oct 03 15:01:16 crc kubenswrapper[4861]: I1003 15:01:16.169346 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-twpbb"] Oct 03 15:01:16 crc kubenswrapper[4861]: I1003 15:01:16.267157 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4b18193b-fa5a-448b-8557-5d11bfdb8483-utilities\") pod \"redhat-operators-twpbb\" (UID: \"4b18193b-fa5a-448b-8557-5d11bfdb8483\") " pod="openshift-marketplace/redhat-operators-twpbb" Oct 03 15:01:16 crc kubenswrapper[4861]: I1003 15:01:16.267327 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4b18193b-fa5a-448b-8557-5d11bfdb8483-catalog-content\") pod \"redhat-operators-twpbb\" (UID: \"4b18193b-fa5a-448b-8557-5d11bfdb8483\") " pod="openshift-marketplace/redhat-operators-twpbb" Oct 03 15:01:16 crc kubenswrapper[4861]: I1003 15:01:16.267371 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9ck7n\" (UniqueName: \"kubernetes.io/projected/4b18193b-fa5a-448b-8557-5d11bfdb8483-kube-api-access-9ck7n\") pod \"redhat-operators-twpbb\" (UID: \"4b18193b-fa5a-448b-8557-5d11bfdb8483\") " pod="openshift-marketplace/redhat-operators-twpbb" Oct 03 15:01:16 crc kubenswrapper[4861]: I1003 15:01:16.368424 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4b18193b-fa5a-448b-8557-5d11bfdb8483-utilities\") pod \"redhat-operators-twpbb\" (UID: \"4b18193b-fa5a-448b-8557-5d11bfdb8483\") " pod="openshift-marketplace/redhat-operators-twpbb" Oct 03 15:01:16 crc kubenswrapper[4861]: I1003 15:01:16.368553 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4b18193b-fa5a-448b-8557-5d11bfdb8483-catalog-content\") pod \"redhat-operators-twpbb\" (UID: \"4b18193b-fa5a-448b-8557-5d11bfdb8483\") " pod="openshift-marketplace/redhat-operators-twpbb" Oct 03 15:01:16 crc kubenswrapper[4861]: I1003 15:01:16.368594 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9ck7n\" (UniqueName: \"kubernetes.io/projected/4b18193b-fa5a-448b-8557-5d11bfdb8483-kube-api-access-9ck7n\") pod \"redhat-operators-twpbb\" (UID: \"4b18193b-fa5a-448b-8557-5d11bfdb8483\") " pod="openshift-marketplace/redhat-operators-twpbb" Oct 03 15:01:16 crc kubenswrapper[4861]: I1003 15:01:16.369442 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4b18193b-fa5a-448b-8557-5d11bfdb8483-utilities\") pod \"redhat-operators-twpbb\" (UID: \"4b18193b-fa5a-448b-8557-5d11bfdb8483\") " pod="openshift-marketplace/redhat-operators-twpbb" Oct 03 15:01:16 crc kubenswrapper[4861]: I1003 15:01:16.369442 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4b18193b-fa5a-448b-8557-5d11bfdb8483-catalog-content\") pod \"redhat-operators-twpbb\" (UID: \"4b18193b-fa5a-448b-8557-5d11bfdb8483\") " pod="openshift-marketplace/redhat-operators-twpbb" Oct 03 15:01:16 crc kubenswrapper[4861]: I1003 15:01:16.403377 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-9ck7n\" (UniqueName: \"kubernetes.io/projected/4b18193b-fa5a-448b-8557-5d11bfdb8483-kube-api-access-9ck7n\") pod \"redhat-operators-twpbb\" (UID: \"4b18193b-fa5a-448b-8557-5d11bfdb8483\") " pod="openshift-marketplace/redhat-operators-twpbb" Oct 03 15:01:16 crc kubenswrapper[4861]: I1003 15:01:16.497722 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-twpbb" Oct 03 15:01:17 crc kubenswrapper[4861]: I1003 15:01:17.033064 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-twpbb"] Oct 03 15:01:18 crc kubenswrapper[4861]: I1003 15:01:18.053551 4861 generic.go:334] "Generic (PLEG): container finished" podID="4b18193b-fa5a-448b-8557-5d11bfdb8483" containerID="53f8eb6ead3b92706df113522b18d47a0b0298b73289d5ccecca6efb5c69d74e" exitCode=0 Oct 03 15:01:18 crc kubenswrapper[4861]: I1003 15:01:18.053807 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-twpbb" event={"ID":"4b18193b-fa5a-448b-8557-5d11bfdb8483","Type":"ContainerDied","Data":"53f8eb6ead3b92706df113522b18d47a0b0298b73289d5ccecca6efb5c69d74e"} Oct 03 15:01:18 crc kubenswrapper[4861]: I1003 15:01:18.053831 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-twpbb" event={"ID":"4b18193b-fa5a-448b-8557-5d11bfdb8483","Type":"ContainerStarted","Data":"c4ba7c25470a0dd7fc08d964ccd6dd72ac8ba2046dc1f4177eae686559d2f937"} Oct 03 15:01:20 crc kubenswrapper[4861]: I1003 15:01:20.074531 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-twpbb" event={"ID":"4b18193b-fa5a-448b-8557-5d11bfdb8483","Type":"ContainerStarted","Data":"2bbcf0dad6e214f949b88cbfa99e67413cd9c98f50d88e51127b4d26320f31b9"} Oct 03 15:01:23 crc kubenswrapper[4861]: I1003 15:01:23.099064 4861 generic.go:334] "Generic (PLEG): container finished" podID="4b18193b-fa5a-448b-8557-5d11bfdb8483" containerID="2bbcf0dad6e214f949b88cbfa99e67413cd9c98f50d88e51127b4d26320f31b9" exitCode=0 Oct 03 15:01:23 crc kubenswrapper[4861]: I1003 15:01:23.099267 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-twpbb" event={"ID":"4b18193b-fa5a-448b-8557-5d11bfdb8483","Type":"ContainerDied","Data":"2bbcf0dad6e214f949b88cbfa99e67413cd9c98f50d88e51127b4d26320f31b9"} Oct 03 15:01:24 crc kubenswrapper[4861]: I1003 15:01:24.108841 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-twpbb" event={"ID":"4b18193b-fa5a-448b-8557-5d11bfdb8483","Type":"ContainerStarted","Data":"44b6e4d6b333a1abd0400f044ec1c375010965409e6c6ec235d1bdaa9c91f60c"} Oct 03 15:01:24 crc kubenswrapper[4861]: I1003 15:01:24.133092 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-twpbb" podStartSLOduration=2.6560097430000003 podStartE2EDuration="8.133076365s" podCreationTimestamp="2025-10-03 15:01:16 +0000 UTC" firstStartedPulling="2025-10-03 15:01:18.055658837 +0000 UTC m=+5392.053643884" lastFinishedPulling="2025-10-03 15:01:23.532725459 +0000 UTC m=+5397.530710506" observedRunningTime="2025-10-03 15:01:24.125176282 +0000 UTC m=+5398.123161329" watchObservedRunningTime="2025-10-03 15:01:24.133076365 +0000 UTC m=+5398.131061412" Oct 03 15:01:26 crc kubenswrapper[4861]: I1003 15:01:26.498814 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-twpbb" 
Oct 03 15:01:26 crc kubenswrapper[4861]: I1003 15:01:26.499200 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-twpbb" Oct 03 15:01:27 crc kubenswrapper[4861]: I1003 15:01:27.556768 4861 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-twpbb" podUID="4b18193b-fa5a-448b-8557-5d11bfdb8483" containerName="registry-server" probeResult="failure" output=< Oct 03 15:01:27 crc kubenswrapper[4861]: timeout: failed to connect service ":50051" within 1s Oct 03 15:01:27 crc kubenswrapper[4861]: > Oct 03 15:01:30 crc kubenswrapper[4861]: I1003 15:01:30.145124 4861 patch_prober.go:28] interesting pod/machine-config-daemon-t9slw container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 15:01:30 crc kubenswrapper[4861]: I1003 15:01:30.145562 4861 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 15:01:37 crc kubenswrapper[4861]: I1003 15:01:37.542358 4861 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-twpbb" podUID="4b18193b-fa5a-448b-8557-5d11bfdb8483" containerName="registry-server" probeResult="failure" output=< Oct 03 15:01:37 crc kubenswrapper[4861]: timeout: failed to connect service ":50051" within 1s Oct 03 15:01:37 crc kubenswrapper[4861]: > Oct 03 15:01:46 crc kubenswrapper[4861]: I1003 15:01:46.561649 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-twpbb" Oct 03 15:01:46 crc kubenswrapper[4861]: I1003 15:01:46.628569 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-twpbb" Oct 03 15:01:47 crc kubenswrapper[4861]: I1003 15:01:47.359602 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-twpbb"] Oct 03 15:01:48 crc kubenswrapper[4861]: I1003 15:01:48.304164 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-twpbb" podUID="4b18193b-fa5a-448b-8557-5d11bfdb8483" containerName="registry-server" containerID="cri-o://44b6e4d6b333a1abd0400f044ec1c375010965409e6c6ec235d1bdaa9c91f60c" gracePeriod=2 Oct 03 15:01:48 crc kubenswrapper[4861]: I1003 15:01:48.844416 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-twpbb" Oct 03 15:01:48 crc kubenswrapper[4861]: I1003 15:01:48.880533 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9ck7n\" (UniqueName: \"kubernetes.io/projected/4b18193b-fa5a-448b-8557-5d11bfdb8483-kube-api-access-9ck7n\") pod \"4b18193b-fa5a-448b-8557-5d11bfdb8483\" (UID: \"4b18193b-fa5a-448b-8557-5d11bfdb8483\") " Oct 03 15:01:48 crc kubenswrapper[4861]: I1003 15:01:48.880638 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4b18193b-fa5a-448b-8557-5d11bfdb8483-catalog-content\") pod \"4b18193b-fa5a-448b-8557-5d11bfdb8483\" (UID: \"4b18193b-fa5a-448b-8557-5d11bfdb8483\") " Oct 03 15:01:48 crc kubenswrapper[4861]: I1003 15:01:48.880684 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4b18193b-fa5a-448b-8557-5d11bfdb8483-utilities\") pod \"4b18193b-fa5a-448b-8557-5d11bfdb8483\" (UID: \"4b18193b-fa5a-448b-8557-5d11bfdb8483\") " Oct 03 15:01:48 crc kubenswrapper[4861]: I1003 15:01:48.881716 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4b18193b-fa5a-448b-8557-5d11bfdb8483-utilities" (OuterVolumeSpecName: "utilities") pod "4b18193b-fa5a-448b-8557-5d11bfdb8483" (UID: "4b18193b-fa5a-448b-8557-5d11bfdb8483"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 15:01:48 crc kubenswrapper[4861]: I1003 15:01:48.898409 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4b18193b-fa5a-448b-8557-5d11bfdb8483-kube-api-access-9ck7n" (OuterVolumeSpecName: "kube-api-access-9ck7n") pod "4b18193b-fa5a-448b-8557-5d11bfdb8483" (UID: "4b18193b-fa5a-448b-8557-5d11bfdb8483"). InnerVolumeSpecName "kube-api-access-9ck7n". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:01:48 crc kubenswrapper[4861]: I1003 15:01:48.977994 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4b18193b-fa5a-448b-8557-5d11bfdb8483-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4b18193b-fa5a-448b-8557-5d11bfdb8483" (UID: "4b18193b-fa5a-448b-8557-5d11bfdb8483"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 15:01:48 crc kubenswrapper[4861]: I1003 15:01:48.984063 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9ck7n\" (UniqueName: \"kubernetes.io/projected/4b18193b-fa5a-448b-8557-5d11bfdb8483-kube-api-access-9ck7n\") on node \"crc\" DevicePath \"\"" Oct 03 15:01:48 crc kubenswrapper[4861]: I1003 15:01:48.984094 4861 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4b18193b-fa5a-448b-8557-5d11bfdb8483-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 15:01:48 crc kubenswrapper[4861]: I1003 15:01:48.984105 4861 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4b18193b-fa5a-448b-8557-5d11bfdb8483-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 15:01:49 crc kubenswrapper[4861]: I1003 15:01:49.313294 4861 generic.go:334] "Generic (PLEG): container finished" podID="4b18193b-fa5a-448b-8557-5d11bfdb8483" containerID="44b6e4d6b333a1abd0400f044ec1c375010965409e6c6ec235d1bdaa9c91f60c" exitCode=0 Oct 03 15:01:49 crc kubenswrapper[4861]: I1003 15:01:49.313346 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-twpbb" event={"ID":"4b18193b-fa5a-448b-8557-5d11bfdb8483","Type":"ContainerDied","Data":"44b6e4d6b333a1abd0400f044ec1c375010965409e6c6ec235d1bdaa9c91f60c"} Oct 03 15:01:49 crc kubenswrapper[4861]: I1003 15:01:49.313380 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-twpbb" Oct 03 15:01:49 crc kubenswrapper[4861]: I1003 15:01:49.313398 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-twpbb" event={"ID":"4b18193b-fa5a-448b-8557-5d11bfdb8483","Type":"ContainerDied","Data":"c4ba7c25470a0dd7fc08d964ccd6dd72ac8ba2046dc1f4177eae686559d2f937"} Oct 03 15:01:49 crc kubenswrapper[4861]: I1003 15:01:49.313418 4861 scope.go:117] "RemoveContainer" containerID="44b6e4d6b333a1abd0400f044ec1c375010965409e6c6ec235d1bdaa9c91f60c" Oct 03 15:01:49 crc kubenswrapper[4861]: I1003 15:01:49.342052 4861 scope.go:117] "RemoveContainer" containerID="2bbcf0dad6e214f949b88cbfa99e67413cd9c98f50d88e51127b4d26320f31b9" Oct 03 15:01:49 crc kubenswrapper[4861]: I1003 15:01:49.420251 4861 scope.go:117] "RemoveContainer" containerID="53f8eb6ead3b92706df113522b18d47a0b0298b73289d5ccecca6efb5c69d74e" Oct 03 15:01:49 crc kubenswrapper[4861]: I1003 15:01:49.420843 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-twpbb"] Oct 03 15:01:49 crc kubenswrapper[4861]: I1003 15:01:49.465769 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-twpbb"] Oct 03 15:01:49 crc kubenswrapper[4861]: I1003 15:01:49.475511 4861 scope.go:117] "RemoveContainer" containerID="44b6e4d6b333a1abd0400f044ec1c375010965409e6c6ec235d1bdaa9c91f60c" Oct 03 15:01:49 crc kubenswrapper[4861]: E1003 15:01:49.481035 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"44b6e4d6b333a1abd0400f044ec1c375010965409e6c6ec235d1bdaa9c91f60c\": container with ID starting with 44b6e4d6b333a1abd0400f044ec1c375010965409e6c6ec235d1bdaa9c91f60c not found: ID does not exist" containerID="44b6e4d6b333a1abd0400f044ec1c375010965409e6c6ec235d1bdaa9c91f60c" Oct 03 15:01:49 crc kubenswrapper[4861]: I1003 15:01:49.481071 4861 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"44b6e4d6b333a1abd0400f044ec1c375010965409e6c6ec235d1bdaa9c91f60c"} err="failed to get container status \"44b6e4d6b333a1abd0400f044ec1c375010965409e6c6ec235d1bdaa9c91f60c\": rpc error: code = NotFound desc = could not find container \"44b6e4d6b333a1abd0400f044ec1c375010965409e6c6ec235d1bdaa9c91f60c\": container with ID starting with 44b6e4d6b333a1abd0400f044ec1c375010965409e6c6ec235d1bdaa9c91f60c not found: ID does not exist" Oct 03 15:01:49 crc kubenswrapper[4861]: I1003 15:01:49.481095 4861 scope.go:117] "RemoveContainer" containerID="2bbcf0dad6e214f949b88cbfa99e67413cd9c98f50d88e51127b4d26320f31b9" Oct 03 15:01:49 crc kubenswrapper[4861]: E1003 15:01:49.492140 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2bbcf0dad6e214f949b88cbfa99e67413cd9c98f50d88e51127b4d26320f31b9\": container with ID starting with 2bbcf0dad6e214f949b88cbfa99e67413cd9c98f50d88e51127b4d26320f31b9 not found: ID does not exist" containerID="2bbcf0dad6e214f949b88cbfa99e67413cd9c98f50d88e51127b4d26320f31b9" Oct 03 15:01:49 crc kubenswrapper[4861]: I1003 15:01:49.492178 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2bbcf0dad6e214f949b88cbfa99e67413cd9c98f50d88e51127b4d26320f31b9"} err="failed to get container status \"2bbcf0dad6e214f949b88cbfa99e67413cd9c98f50d88e51127b4d26320f31b9\": rpc error: code = NotFound desc = could not find container \"2bbcf0dad6e214f949b88cbfa99e67413cd9c98f50d88e51127b4d26320f31b9\": container with ID starting with 2bbcf0dad6e214f949b88cbfa99e67413cd9c98f50d88e51127b4d26320f31b9 not found: ID does not exist" Oct 03 15:01:49 crc kubenswrapper[4861]: I1003 15:01:49.492201 4861 scope.go:117] "RemoveContainer" containerID="53f8eb6ead3b92706df113522b18d47a0b0298b73289d5ccecca6efb5c69d74e" Oct 03 15:01:49 crc kubenswrapper[4861]: E1003 15:01:49.493518 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"53f8eb6ead3b92706df113522b18d47a0b0298b73289d5ccecca6efb5c69d74e\": container with ID starting with 53f8eb6ead3b92706df113522b18d47a0b0298b73289d5ccecca6efb5c69d74e not found: ID does not exist" containerID="53f8eb6ead3b92706df113522b18d47a0b0298b73289d5ccecca6efb5c69d74e" Oct 03 15:01:49 crc kubenswrapper[4861]: I1003 15:01:49.493567 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"53f8eb6ead3b92706df113522b18d47a0b0298b73289d5ccecca6efb5c69d74e"} err="failed to get container status \"53f8eb6ead3b92706df113522b18d47a0b0298b73289d5ccecca6efb5c69d74e\": rpc error: code = NotFound desc = could not find container \"53f8eb6ead3b92706df113522b18d47a0b0298b73289d5ccecca6efb5c69d74e\": container with ID starting with 53f8eb6ead3b92706df113522b18d47a0b0298b73289d5ccecca6efb5c69d74e not found: ID does not exist" Oct 03 15:01:49 crc kubenswrapper[4861]: I1003 15:01:49.630266 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-64fc59b774-2zcpl_cf1005d0-0ea7-4d8a-bec8-445949aa9162/barbican-api/0.log" Oct 03 15:01:49 crc kubenswrapper[4861]: I1003 15:01:49.734777 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-64fc59b774-2zcpl_cf1005d0-0ea7-4d8a-bec8-445949aa9162/barbican-api-log/0.log" Oct 03 15:01:49 crc kubenswrapper[4861]: I1003 15:01:49.853666 4861 log.go:25] "Finished parsing log 
file" path="/var/log/pods/openstack_barbican-keystone-listener-7b56bc9586-w989t_fc6983c6-4e21-49b6-a48a-f062bb5afd49/barbican-keystone-listener/0.log" Oct 03 15:01:50 crc kubenswrapper[4861]: I1003 15:01:50.018570 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-7b56bc9586-w989t_fc6983c6-4e21-49b6-a48a-f062bb5afd49/barbican-keystone-listener-log/0.log" Oct 03 15:01:50 crc kubenswrapper[4861]: I1003 15:01:50.066999 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-79b8bfc47f-5jchw_ba4d9d03-a7d0-46ed-8429-008882213b57/barbican-worker/0.log" Oct 03 15:01:50 crc kubenswrapper[4861]: I1003 15:01:50.234912 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-79b8bfc47f-5jchw_ba4d9d03-a7d0-46ed-8429-008882213b57/barbican-worker-log/0.log" Oct 03 15:01:50 crc kubenswrapper[4861]: I1003 15:01:50.350825 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-8zh9n_88cfdf25-6cf6-4553-a95b-d49e13d2f509/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log" Oct 03 15:01:50 crc kubenswrapper[4861]: I1003 15:01:50.657710 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_14629b29-a838-444c-9acf-42c14b7dbe5a/ceilometer-notification-agent/0.log" Oct 03 15:01:50 crc kubenswrapper[4861]: I1003 15:01:50.666528 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_14629b29-a838-444c-9acf-42c14b7dbe5a/ceilometer-central-agent/0.log" Oct 03 15:01:50 crc kubenswrapper[4861]: I1003 15:01:50.690735 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4b18193b-fa5a-448b-8557-5d11bfdb8483" path="/var/lib/kubelet/pods/4b18193b-fa5a-448b-8557-5d11bfdb8483/volumes" Oct 03 15:01:51 crc kubenswrapper[4861]: I1003 15:01:51.092439 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_14629b29-a838-444c-9acf-42c14b7dbe5a/proxy-httpd/0.log" Oct 03 15:01:51 crc kubenswrapper[4861]: I1003 15:01:51.140459 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_14629b29-a838-444c-9acf-42c14b7dbe5a/sg-core/0.log" Oct 03 15:01:51 crc kubenswrapper[4861]: I1003 15:01:51.312561 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_de8f27cb-a40f-4ab0-b709-4abad3ff72bb/cinder-api/0.log" Oct 03 15:01:51 crc kubenswrapper[4861]: I1003 15:01:51.434763 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_de8f27cb-a40f-4ab0-b709-4abad3ff72bb/cinder-api-log/0.log" Oct 03 15:01:51 crc kubenswrapper[4861]: I1003 15:01:51.616324 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_8af097a4-c83f-4687-9804-fded6b1eb9ac/cinder-scheduler/0.log" Oct 03 15:01:51 crc kubenswrapper[4861]: I1003 15:01:51.719674 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_8af097a4-c83f-4687-9804-fded6b1eb9ac/probe/0.log" Oct 03 15:01:51 crc kubenswrapper[4861]: I1003 15:01:51.934061 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-2p8f9_15a74413-2b7d-42e1-9b05-e50d739dfd39/configure-network-edpm-deployment-openstack-edpm-ipam/0.log" Oct 03 15:01:52 crc kubenswrapper[4861]: I1003 15:01:52.086802 4861 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-528b8_2b2349a3-d6ca-4e6f-a564-03dac17e4746/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Oct 03 15:01:52 crc kubenswrapper[4861]: I1003 15:01:52.268509 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-smz4r_f353c9e7-1d8f-4084-b475-0c725858f034/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Oct 03 15:01:52 crc kubenswrapper[4861]: I1003 15:01:52.446572 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-79dc84bdb7-xjmx4_f6f37303-3f4a-44b7-aef2-ed92a6c277e2/init/0.log" Oct 03 15:01:52 crc kubenswrapper[4861]: I1003 15:01:52.715516 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-79dc84bdb7-xjmx4_f6f37303-3f4a-44b7-aef2-ed92a6c277e2/init/0.log" Oct 03 15:01:52 crc kubenswrapper[4861]: I1003 15:01:52.803025 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-79dc84bdb7-xjmx4_f6f37303-3f4a-44b7-aef2-ed92a6c277e2/dnsmasq-dns/0.log" Oct 03 15:01:53 crc kubenswrapper[4861]: I1003 15:01:53.007493 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_download-cache-edpm-deployment-openstack-edpm-ipam-8tkzn_e5903a2f-8943-4fab-8ddf-6ec1b8329590/download-cache-edpm-deployment-openstack-edpm-ipam/0.log" Oct 03 15:01:53 crc kubenswrapper[4861]: I1003 15:01:53.086183 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_3f0d752d-7682-4244-9682-bf78e9a9d8ec/glance-httpd/0.log" Oct 03 15:01:53 crc kubenswrapper[4861]: I1003 15:01:53.198822 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_3f0d752d-7682-4244-9682-bf78e9a9d8ec/glance-log/0.log" Oct 03 15:01:53 crc kubenswrapper[4861]: I1003 15:01:53.386295 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_a2693b41-01f0-48e9-b551-fa6c48d29531/glance-httpd/0.log" Oct 03 15:01:53 crc kubenswrapper[4861]: I1003 15:01:53.426615 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_a2693b41-01f0-48e9-b551-fa6c48d29531/glance-log/0.log" Oct 03 15:01:53 crc kubenswrapper[4861]: I1003 15:01:53.644313 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-6c8cb9d9fb-bt6ls_81ec621b-cc30-4ab2-ae0e-bdd71629009f/horizon/1.log" Oct 03 15:01:53 crc kubenswrapper[4861]: I1003 15:01:53.720888 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-6c8cb9d9fb-bt6ls_81ec621b-cc30-4ab2-ae0e-bdd71629009f/horizon/0.log" Oct 03 15:01:54 crc kubenswrapper[4861]: I1003 15:01:54.003689 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-69f6f_e23c8e2c-00d8-43d0-a10a-5f7fef662315/install-certs-edpm-deployment-openstack-edpm-ipam/0.log" Oct 03 15:01:54 crc kubenswrapper[4861]: I1003 15:01:54.159359 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-6c8cb9d9fb-bt6ls_81ec621b-cc30-4ab2-ae0e-bdd71629009f/horizon-log/0.log" Oct 03 15:01:54 crc kubenswrapper[4861]: I1003 15:01:54.220274 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-hk68t_fd61a6d8-4264-4d03-8891-c1bdf462fa7b/install-os-edpm-deployment-openstack-edpm-ipam/0.log" Oct 03 15:01:54 crc kubenswrapper[4861]: 
I1003 15:01:54.321074 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29325001-vhzr2_eb217f97-9f13-4711-b3ab-f449bdc34bae/keystone-cron/0.log" Oct 03 15:01:54 crc kubenswrapper[4861]: I1003 15:01:54.697872 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29325061-8zrvn_34b9798c-bdf0-4869-ba43-75c3c3261258/keystone-cron/0.log" Oct 03 15:01:54 crc kubenswrapper[4861]: I1003 15:01:54.961091 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_ccb19566-aa49-4551-9fbf-a05affdd60e2/kube-state-metrics/0.log" Oct 03 15:01:55 crc kubenswrapper[4861]: I1003 15:01:55.243043 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-d84968f5f-dw9pq_fbeb45d8-15f9-47b5-b6af-f578362eda62/keystone-api/0.log" Oct 03 15:01:55 crc kubenswrapper[4861]: I1003 15:01:55.273712 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-6z5r9_fd4c1aae-4d8b-49be-ad63-d6531b244f73/libvirt-edpm-deployment-openstack-edpm-ipam/0.log" Oct 03 15:01:55 crc kubenswrapper[4861]: I1003 15:01:55.823878 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-55bc9b5c77-pkdzr_b539b449-5e16-4bb4-8931-ba6c9ad1df7d/neutron-httpd/0.log" Oct 03 15:01:56 crc kubenswrapper[4861]: I1003 15:01:56.092181 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-8zld2_5a870822-9c29-4acb-b63c-2ff86a95a9fc/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log" Oct 03 15:01:56 crc kubenswrapper[4861]: I1003 15:01:56.126710 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-55bc9b5c77-pkdzr_b539b449-5e16-4bb4-8931-ba6c9ad1df7d/neutron-api/0.log" Oct 03 15:01:57 crc kubenswrapper[4861]: I1003 15:01:57.162891 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_04bb7c95-25e1-49b2-b659-2af6e5354749/nova-cell0-conductor-conductor/0.log" Oct 03 15:01:57 crc kubenswrapper[4861]: I1003 15:01:57.717293 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_9f7bb8d1-2c92-4ce3-b510-386e42fab1ac/nova-api-log/0.log" Oct 03 15:01:57 crc kubenswrapper[4861]: I1003 15:01:57.818802 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_90ae374e-4f8e-4d9c-84e8-00a5c571fd98/nova-cell1-conductor-conductor/0.log" Oct 03 15:01:58 crc kubenswrapper[4861]: I1003 15:01:58.203724 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_9f7bb8d1-2c92-4ce3-b510-386e42fab1ac/nova-api-api/0.log" Oct 03 15:01:58 crc kubenswrapper[4861]: I1003 15:01:58.505867 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_9b155fef-8eef-48f9-a6fe-b76d46ddadb0/nova-cell1-novncproxy-novncproxy/0.log" Oct 03 15:01:58 crc kubenswrapper[4861]: I1003 15:01:58.657055 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-edpm-deployment-openstack-edpm-ipam-mlf7m_bc64fc8f-fe84-485b-8d52-a4e26a00435a/nova-edpm-deployment-openstack-edpm-ipam/0.log" Oct 03 15:01:58 crc kubenswrapper[4861]: I1003 15:01:58.899982 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_92433e8c-4d6d-4a9d-a492-192863eed46c/nova-metadata-log/0.log" Oct 03 15:01:59 crc kubenswrapper[4861]: I1003 15:01:59.685465 4861 log.go:25] "Finished parsing 
log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_cc0949fe-630b-4f65-9c6c-7a87272586a2/mysql-bootstrap/0.log" Oct 03 15:01:59 crc kubenswrapper[4861]: I1003 15:01:59.804004 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_c364afdb-ff79-4011-abeb-243d45ea7b95/nova-scheduler-scheduler/0.log" Oct 03 15:01:59 crc kubenswrapper[4861]: I1003 15:01:59.870776 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_cc0949fe-630b-4f65-9c6c-7a87272586a2/mysql-bootstrap/0.log" Oct 03 15:01:59 crc kubenswrapper[4861]: I1003 15:01:59.878764 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_7464ed9c-8f08-4c7a-8ca3-8a57734bd31f/memcached/0.log" Oct 03 15:02:00 crc kubenswrapper[4861]: I1003 15:02:00.145028 4861 patch_prober.go:28] interesting pod/machine-config-daemon-t9slw container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 15:02:00 crc kubenswrapper[4861]: I1003 15:02:00.145292 4861 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 15:02:00 crc kubenswrapper[4861]: I1003 15:02:00.230494 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_e729e54c-b7bc-46e8-94c8-ef5d8a4f42b1/mysql-bootstrap/0.log" Oct 03 15:02:00 crc kubenswrapper[4861]: I1003 15:02:00.268672 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_cc0949fe-630b-4f65-9c6c-7a87272586a2/galera/0.log" Oct 03 15:02:00 crc kubenswrapper[4861]: I1003 15:02:00.509585 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_e729e54c-b7bc-46e8-94c8-ef5d8a4f42b1/galera/0.log" Oct 03 15:02:00 crc kubenswrapper[4861]: I1003 15:02:00.570404 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_e729e54c-b7bc-46e8-94c8-ef5d8a4f42b1/mysql-bootstrap/0.log" Oct 03 15:02:00 crc kubenswrapper[4861]: I1003 15:02:00.630870 4861 scope.go:117] "RemoveContainer" containerID="fa8f98f5220911023149a79a7816b7eeadcac429f8627ed52354f1b5a47d935d" Oct 03 15:02:00 crc kubenswrapper[4861]: I1003 15:02:00.709528 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_92433e8c-4d6d-4a9d-a492-192863eed46c/nova-metadata-metadata/0.log" Oct 03 15:02:00 crc kubenswrapper[4861]: I1003 15:02:00.833136 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_08030cdc-19c1-46f6-940c-48d493ce3880/openstackclient/0.log" Oct 03 15:02:00 crc kubenswrapper[4861]: I1003 15:02:00.923411 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-xsmhv_0636b1a6-6f21-4d14-8a07-014a3e9395c7/openstack-network-exporter/0.log" Oct 03 15:02:01 crc kubenswrapper[4861]: I1003 15:02:01.077811 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-j8c4m_72bbb704-bf32-46a9-9540-32c2e385f8ab/ovsdb-server-init/0.log" Oct 03 15:02:01 crc kubenswrapper[4861]: I1003 15:02:01.256216 4861 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_ovn-controller-ovs-j8c4m_72bbb704-bf32-46a9-9540-32c2e385f8ab/ovsdb-server-init/0.log" Oct 03 15:02:01 crc kubenswrapper[4861]: I1003 15:02:01.296582 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-j8c4m_72bbb704-bf32-46a9-9540-32c2e385f8ab/ovsdb-server/0.log" Oct 03 15:02:01 crc kubenswrapper[4861]: I1003 15:02:01.301334 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-j8c4m_72bbb704-bf32-46a9-9540-32c2e385f8ab/ovs-vswitchd/0.log" Oct 03 15:02:01 crc kubenswrapper[4861]: I1003 15:02:01.507391 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-x5wkc_ef2b767c-ef6a-4364-a3f0-14b68bee3986/ovn-controller/0.log" Oct 03 15:02:01 crc kubenswrapper[4861]: I1003 15:02:01.600551 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-7c2gq_defed5cd-53ca-4e60-af05-a4c425abbf60/ovn-edpm-deployment-openstack-edpm-ipam/0.log" Oct 03 15:02:01 crc kubenswrapper[4861]: I1003 15:02:01.744444 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_b310d13d-da67-4151-948d-36a86d413a27/openstack-network-exporter/0.log" Oct 03 15:02:01 crc kubenswrapper[4861]: I1003 15:02:01.816644 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_b310d13d-da67-4151-948d-36a86d413a27/ovn-northd/0.log" Oct 03 15:02:02 crc kubenswrapper[4861]: I1003 15:02:02.182645 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_6714f489-ceb2-4b99-a61d-fe45289bed5f/ovsdbserver-nb/0.log" Oct 03 15:02:02 crc kubenswrapper[4861]: I1003 15:02:02.217986 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_6714f489-ceb2-4b99-a61d-fe45289bed5f/openstack-network-exporter/0.log" Oct 03 15:02:02 crc kubenswrapper[4861]: I1003 15:02:02.414634 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_a361bfa1-97ef-4ee2-bcfe-3763898cbc32/openstack-network-exporter/0.log" Oct 03 15:02:02 crc kubenswrapper[4861]: I1003 15:02:02.467444 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_a361bfa1-97ef-4ee2-bcfe-3763898cbc32/ovsdbserver-sb/0.log" Oct 03 15:02:02 crc kubenswrapper[4861]: I1003 15:02:02.711898 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_b222a9ce-46d6-4caf-b76d-f6b773276cb1/setup-container/0.log" Oct 03 15:02:02 crc kubenswrapper[4861]: I1003 15:02:02.775662 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-c6f6cb4f6-xc49q_37f329ec-eb69-4d87-a22b-ace765cef57f/placement-api/0.log" Oct 03 15:02:02 crc kubenswrapper[4861]: I1003 15:02:02.834727 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-c6f6cb4f6-xc49q_37f329ec-eb69-4d87-a22b-ace765cef57f/placement-log/0.log" Oct 03 15:02:03 crc kubenswrapper[4861]: I1003 15:02:03.006309 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_b222a9ce-46d6-4caf-b76d-f6b773276cb1/setup-container/0.log" Oct 03 15:02:03 crc kubenswrapper[4861]: I1003 15:02:03.090667 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_b222a9ce-46d6-4caf-b76d-f6b773276cb1/rabbitmq/0.log" Oct 03 15:02:03 crc kubenswrapper[4861]: I1003 15:02:03.107840 4861 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_rabbitmq-server-0_0494d758-5f63-45c6-930c-f34b43484fd9/setup-container/0.log" Oct 03 15:02:03 crc kubenswrapper[4861]: I1003 15:02:03.220992 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_0494d758-5f63-45c6-930c-f34b43484fd9/setup-container/0.log" Oct 03 15:02:03 crc kubenswrapper[4861]: I1003 15:02:03.345426 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-g587s_b68af2ce-5dae-47da-801b-a2ad6a6b8db1/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log" Oct 03 15:02:03 crc kubenswrapper[4861]: I1003 15:02:03.348030 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_0494d758-5f63-45c6-930c-f34b43484fd9/rabbitmq/0.log" Oct 03 15:02:03 crc kubenswrapper[4861]: I1003 15:02:03.506348 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_redhat-edpm-deployment-openstack-edpm-ipam-js7xl_a5b6d421-13d1-4c5b-b244-087790b16c8b/redhat-edpm-deployment-openstack-edpm-ipam/0.log" Oct 03 15:02:03 crc kubenswrapper[4861]: I1003 15:02:03.668920 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-jjs67_bb1b5aa4-44c6-475c-8995-ac100260ce29/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log" Oct 03 15:02:03 crc kubenswrapper[4861]: I1003 15:02:03.818399 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-996sx_6877d996-5390-4bac-8c57-cd3f25a65554/run-os-edpm-deployment-openstack-edpm-ipam/0.log" Oct 03 15:02:03 crc kubenswrapper[4861]: I1003 15:02:03.901404 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-7qzsr_e8a2edd4-057a-4eca-948e-5c3eeb2a0550/ssh-known-hosts-edpm-deployment/0.log" Oct 03 15:02:04 crc kubenswrapper[4861]: I1003 15:02:04.155330 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-5df8ff69f5-s22b2_71387314-2734-4ddf-ba27-c27c5cc05b0b/proxy-httpd/0.log" Oct 03 15:02:04 crc kubenswrapper[4861]: I1003 15:02:04.163868 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-5df8ff69f5-s22b2_71387314-2734-4ddf-ba27-c27c5cc05b0b/proxy-server/0.log" Oct 03 15:02:04 crc kubenswrapper[4861]: I1003 15:02:04.251664 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-ring-rebalance-s5lt7_971f0946-1828-4512-9b7d-2bafc5a78ef3/swift-ring-rebalance/0.log" Oct 03 15:02:04 crc kubenswrapper[4861]: I1003 15:02:04.350586 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_532954b7-a9d5-4ddb-87af-b17408a5db8b/account-auditor/0.log" Oct 03 15:02:04 crc kubenswrapper[4861]: I1003 15:02:04.446308 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_532954b7-a9d5-4ddb-87af-b17408a5db8b/account-reaper/0.log" Oct 03 15:02:04 crc kubenswrapper[4861]: I1003 15:02:04.563655 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_532954b7-a9d5-4ddb-87af-b17408a5db8b/account-replicator/0.log" Oct 03 15:02:04 crc kubenswrapper[4861]: I1003 15:02:04.581452 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_532954b7-a9d5-4ddb-87af-b17408a5db8b/account-server/0.log" Oct 03 15:02:04 crc kubenswrapper[4861]: I1003 15:02:04.645917 4861 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_swift-storage-0_532954b7-a9d5-4ddb-87af-b17408a5db8b/container-auditor/0.log" Oct 03 15:02:04 crc kubenswrapper[4861]: I1003 15:02:04.661945 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_532954b7-a9d5-4ddb-87af-b17408a5db8b/container-replicator/0.log" Oct 03 15:02:04 crc kubenswrapper[4861]: I1003 15:02:04.769300 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_532954b7-a9d5-4ddb-87af-b17408a5db8b/container-server/0.log" Oct 03 15:02:04 crc kubenswrapper[4861]: I1003 15:02:04.860163 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_532954b7-a9d5-4ddb-87af-b17408a5db8b/container-updater/0.log" Oct 03 15:02:04 crc kubenswrapper[4861]: I1003 15:02:04.885933 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_532954b7-a9d5-4ddb-87af-b17408a5db8b/object-expirer/0.log" Oct 03 15:02:04 crc kubenswrapper[4861]: I1003 15:02:04.940136 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_532954b7-a9d5-4ddb-87af-b17408a5db8b/object-auditor/0.log" Oct 03 15:02:05 crc kubenswrapper[4861]: I1003 15:02:05.047515 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_532954b7-a9d5-4ddb-87af-b17408a5db8b/object-replicator/0.log" Oct 03 15:02:05 crc kubenswrapper[4861]: I1003 15:02:05.063731 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_532954b7-a9d5-4ddb-87af-b17408a5db8b/object-server/0.log" Oct 03 15:02:05 crc kubenswrapper[4861]: I1003 15:02:05.123039 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_532954b7-a9d5-4ddb-87af-b17408a5db8b/object-updater/0.log" Oct 03 15:02:05 crc kubenswrapper[4861]: I1003 15:02:05.146465 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_532954b7-a9d5-4ddb-87af-b17408a5db8b/rsync/0.log" Oct 03 15:02:05 crc kubenswrapper[4861]: I1003 15:02:05.273578 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_532954b7-a9d5-4ddb-87af-b17408a5db8b/swift-recon-cron/0.log" Oct 03 15:02:05 crc kubenswrapper[4861]: I1003 15:02:05.369275 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-edpm-deployment-openstack-edpm-ipam-7pxf6_3f63691e-8f0c-4494-a774-46fe7aaba3c9/telemetry-edpm-deployment-openstack-edpm-ipam/0.log" Oct 03 15:02:05 crc kubenswrapper[4861]: I1003 15:02:05.554400 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tempest-tests-tempest_a0580466-6d38-4ad0-a84e-dcf312f06369/tempest-tests-tempest-tests-runner/0.log" Oct 03 15:02:05 crc kubenswrapper[4861]: I1003 15:02:05.896885 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_test-operator-logs-pod-tempest-tempest-tests-tempest_9b4f3282-6471-451a-95d3-5a1803ae69ae/test-operator-logs-container/0.log" Oct 03 15:02:06 crc kubenswrapper[4861]: I1003 15:02:06.086608 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-jk5jz_c87ad9ae-123b-4fb0-a1cc-2cc0ffd85ad3/validate-network-edpm-deployment-openstack-edpm-ipam/0.log" Oct 03 15:02:30 crc kubenswrapper[4861]: I1003 15:02:30.145340 4861 patch_prober.go:28] interesting pod/machine-config-daemon-t9slw container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure 
output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 15:02:30 crc kubenswrapper[4861]: I1003 15:02:30.146048 4861 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 15:02:30 crc kubenswrapper[4861]: I1003 15:02:30.146114 4861 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" Oct 03 15:02:30 crc kubenswrapper[4861]: I1003 15:02:30.147222 4861 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"77c3015bab5401c3cc6bb663b4e9aeb836cb468891caf7c22966f819ab8330cb"} pod="openshift-machine-config-operator/machine-config-daemon-t9slw" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 03 15:02:30 crc kubenswrapper[4861]: I1003 15:02:30.147375 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" containerName="machine-config-daemon" containerID="cri-o://77c3015bab5401c3cc6bb663b4e9aeb836cb468891caf7c22966f819ab8330cb" gracePeriod=600 Oct 03 15:02:30 crc kubenswrapper[4861]: I1003 15:02:30.656483 4861 generic.go:334] "Generic (PLEG): container finished" podID="d8335d3f-417e-4114-b306-a3d8f6c31348" containerID="77c3015bab5401c3cc6bb663b4e9aeb836cb468891caf7c22966f819ab8330cb" exitCode=0 Oct 03 15:02:30 crc kubenswrapper[4861]: I1003 15:02:30.656689 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" event={"ID":"d8335d3f-417e-4114-b306-a3d8f6c31348","Type":"ContainerDied","Data":"77c3015bab5401c3cc6bb663b4e9aeb836cb468891caf7c22966f819ab8330cb"} Oct 03 15:02:30 crc kubenswrapper[4861]: I1003 15:02:30.656811 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" event={"ID":"d8335d3f-417e-4114-b306-a3d8f6c31348","Type":"ContainerStarted","Data":"9558777aae4f5f6fe07fa47c538bc467cb19e05037fc658a37aed876cda97ecd"} Oct 03 15:02:30 crc kubenswrapper[4861]: I1003 15:02:30.656838 4861 scope.go:117] "RemoveContainer" containerID="b2d449adc90f7d0841e7f09d27f801ef238eaa66c11a79a42bb54ccb6df2a132" Oct 03 15:02:36 crc kubenswrapper[4861]: I1003 15:02:36.726668 4861 generic.go:334] "Generic (PLEG): container finished" podID="b09bbf98-9e9c-4b1b-9245-ed6b5fc3d69b" containerID="42b48bad9cba956d1eba9d94fbb4e201365d5279204b1341c523017a087a5da7" exitCode=0 Oct 03 15:02:36 crc kubenswrapper[4861]: I1003 15:02:36.726800 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-4tdkf/crc-debug-8b768" event={"ID":"b09bbf98-9e9c-4b1b-9245-ed6b5fc3d69b","Type":"ContainerDied","Data":"42b48bad9cba956d1eba9d94fbb4e201365d5279204b1341c523017a087a5da7"} Oct 03 15:02:37 crc kubenswrapper[4861]: I1003 15:02:37.857701 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-4tdkf/crc-debug-8b768" Oct 03 15:02:37 crc kubenswrapper[4861]: I1003 15:02:37.902926 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-4tdkf/crc-debug-8b768"] Oct 03 15:02:37 crc kubenswrapper[4861]: I1003 15:02:37.913302 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-4tdkf/crc-debug-8b768"] Oct 03 15:02:38 crc kubenswrapper[4861]: I1003 15:02:38.030423 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4n4v7\" (UniqueName: \"kubernetes.io/projected/b09bbf98-9e9c-4b1b-9245-ed6b5fc3d69b-kube-api-access-4n4v7\") pod \"b09bbf98-9e9c-4b1b-9245-ed6b5fc3d69b\" (UID: \"b09bbf98-9e9c-4b1b-9245-ed6b5fc3d69b\") " Oct 03 15:02:38 crc kubenswrapper[4861]: I1003 15:02:38.030899 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b09bbf98-9e9c-4b1b-9245-ed6b5fc3d69b-host\") pod \"b09bbf98-9e9c-4b1b-9245-ed6b5fc3d69b\" (UID: \"b09bbf98-9e9c-4b1b-9245-ed6b5fc3d69b\") " Oct 03 15:02:38 crc kubenswrapper[4861]: I1003 15:02:38.030977 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b09bbf98-9e9c-4b1b-9245-ed6b5fc3d69b-host" (OuterVolumeSpecName: "host") pod "b09bbf98-9e9c-4b1b-9245-ed6b5fc3d69b" (UID: "b09bbf98-9e9c-4b1b-9245-ed6b5fc3d69b"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 03 15:02:38 crc kubenswrapper[4861]: I1003 15:02:38.031666 4861 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b09bbf98-9e9c-4b1b-9245-ed6b5fc3d69b-host\") on node \"crc\" DevicePath \"\"" Oct 03 15:02:38 crc kubenswrapper[4861]: I1003 15:02:38.036597 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b09bbf98-9e9c-4b1b-9245-ed6b5fc3d69b-kube-api-access-4n4v7" (OuterVolumeSpecName: "kube-api-access-4n4v7") pod "b09bbf98-9e9c-4b1b-9245-ed6b5fc3d69b" (UID: "b09bbf98-9e9c-4b1b-9245-ed6b5fc3d69b"). InnerVolumeSpecName "kube-api-access-4n4v7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:02:38 crc kubenswrapper[4861]: I1003 15:02:38.134790 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4n4v7\" (UniqueName: \"kubernetes.io/projected/b09bbf98-9e9c-4b1b-9245-ed6b5fc3d69b-kube-api-access-4n4v7\") on node \"crc\" DevicePath \"\"" Oct 03 15:02:38 crc kubenswrapper[4861]: I1003 15:02:38.699941 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b09bbf98-9e9c-4b1b-9245-ed6b5fc3d69b" path="/var/lib/kubelet/pods/b09bbf98-9e9c-4b1b-9245-ed6b5fc3d69b/volumes" Oct 03 15:02:38 crc kubenswrapper[4861]: I1003 15:02:38.749668 4861 scope.go:117] "RemoveContainer" containerID="42b48bad9cba956d1eba9d94fbb4e201365d5279204b1341c523017a087a5da7" Oct 03 15:02:38 crc kubenswrapper[4861]: I1003 15:02:38.749854 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-4tdkf/crc-debug-8b768" Oct 03 15:02:39 crc kubenswrapper[4861]: I1003 15:02:39.108993 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-4tdkf/crc-debug-bbrb6"] Oct 03 15:02:39 crc kubenswrapper[4861]: E1003 15:02:39.109904 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b09bbf98-9e9c-4b1b-9245-ed6b5fc3d69b" containerName="container-00" Oct 03 15:02:39 crc kubenswrapper[4861]: I1003 15:02:39.109926 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="b09bbf98-9e9c-4b1b-9245-ed6b5fc3d69b" containerName="container-00" Oct 03 15:02:39 crc kubenswrapper[4861]: E1003 15:02:39.109953 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4b18193b-fa5a-448b-8557-5d11bfdb8483" containerName="extract-utilities" Oct 03 15:02:39 crc kubenswrapper[4861]: I1003 15:02:39.109965 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="4b18193b-fa5a-448b-8557-5d11bfdb8483" containerName="extract-utilities" Oct 03 15:02:39 crc kubenswrapper[4861]: E1003 15:02:39.110008 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4b18193b-fa5a-448b-8557-5d11bfdb8483" containerName="registry-server" Oct 03 15:02:39 crc kubenswrapper[4861]: I1003 15:02:39.110023 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="4b18193b-fa5a-448b-8557-5d11bfdb8483" containerName="registry-server" Oct 03 15:02:39 crc kubenswrapper[4861]: E1003 15:02:39.110070 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4b18193b-fa5a-448b-8557-5d11bfdb8483" containerName="extract-content" Oct 03 15:02:39 crc kubenswrapper[4861]: I1003 15:02:39.110082 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="4b18193b-fa5a-448b-8557-5d11bfdb8483" containerName="extract-content" Oct 03 15:02:39 crc kubenswrapper[4861]: I1003 15:02:39.110433 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="b09bbf98-9e9c-4b1b-9245-ed6b5fc3d69b" containerName="container-00" Oct 03 15:02:39 crc kubenswrapper[4861]: I1003 15:02:39.110497 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="4b18193b-fa5a-448b-8557-5d11bfdb8483" containerName="registry-server" Oct 03 15:02:39 crc kubenswrapper[4861]: I1003 15:02:39.111503 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-4tdkf/crc-debug-bbrb6" Oct 03 15:02:39 crc kubenswrapper[4861]: I1003 15:02:39.258471 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/1964c30c-6348-4095-8813-8a2241cd9c2d-host\") pod \"crc-debug-bbrb6\" (UID: \"1964c30c-6348-4095-8813-8a2241cd9c2d\") " pod="openshift-must-gather-4tdkf/crc-debug-bbrb6" Oct 03 15:02:39 crc kubenswrapper[4861]: I1003 15:02:39.258740 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kqjwb\" (UniqueName: \"kubernetes.io/projected/1964c30c-6348-4095-8813-8a2241cd9c2d-kube-api-access-kqjwb\") pod \"crc-debug-bbrb6\" (UID: \"1964c30c-6348-4095-8813-8a2241cd9c2d\") " pod="openshift-must-gather-4tdkf/crc-debug-bbrb6" Oct 03 15:02:39 crc kubenswrapper[4861]: I1003 15:02:39.361335 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/1964c30c-6348-4095-8813-8a2241cd9c2d-host\") pod \"crc-debug-bbrb6\" (UID: \"1964c30c-6348-4095-8813-8a2241cd9c2d\") " pod="openshift-must-gather-4tdkf/crc-debug-bbrb6" Oct 03 15:02:39 crc kubenswrapper[4861]: I1003 15:02:39.361436 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kqjwb\" (UniqueName: \"kubernetes.io/projected/1964c30c-6348-4095-8813-8a2241cd9c2d-kube-api-access-kqjwb\") pod \"crc-debug-bbrb6\" (UID: \"1964c30c-6348-4095-8813-8a2241cd9c2d\") " pod="openshift-must-gather-4tdkf/crc-debug-bbrb6" Oct 03 15:02:39 crc kubenswrapper[4861]: I1003 15:02:39.361533 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/1964c30c-6348-4095-8813-8a2241cd9c2d-host\") pod \"crc-debug-bbrb6\" (UID: \"1964c30c-6348-4095-8813-8a2241cd9c2d\") " pod="openshift-must-gather-4tdkf/crc-debug-bbrb6" Oct 03 15:02:39 crc kubenswrapper[4861]: I1003 15:02:39.403500 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kqjwb\" (UniqueName: \"kubernetes.io/projected/1964c30c-6348-4095-8813-8a2241cd9c2d-kube-api-access-kqjwb\") pod \"crc-debug-bbrb6\" (UID: \"1964c30c-6348-4095-8813-8a2241cd9c2d\") " pod="openshift-must-gather-4tdkf/crc-debug-bbrb6" Oct 03 15:02:39 crc kubenswrapper[4861]: I1003 15:02:39.443125 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-4tdkf/crc-debug-bbrb6" Oct 03 15:02:39 crc kubenswrapper[4861]: I1003 15:02:39.760596 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-4tdkf/crc-debug-bbrb6" event={"ID":"1964c30c-6348-4095-8813-8a2241cd9c2d","Type":"ContainerStarted","Data":"07047f7edfdb99885a1801e7ffcd2497206a61ee4649b56c7aea8413a806ead1"} Oct 03 15:02:39 crc kubenswrapper[4861]: I1003 15:02:39.761371 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-4tdkf/crc-debug-bbrb6" event={"ID":"1964c30c-6348-4095-8813-8a2241cd9c2d","Type":"ContainerStarted","Data":"f8c712864aede49b7827756451d252d22674166bb4a42b33f3a691846f7592be"} Oct 03 15:02:39 crc kubenswrapper[4861]: I1003 15:02:39.777077 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-4tdkf/crc-debug-bbrb6" podStartSLOduration=0.777056023 podStartE2EDuration="777.056023ms" podCreationTimestamp="2025-10-03 15:02:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:02:39.775136821 +0000 UTC m=+5473.773121898" watchObservedRunningTime="2025-10-03 15:02:39.777056023 +0000 UTC m=+5473.775041100" Oct 03 15:02:40 crc kubenswrapper[4861]: I1003 15:02:40.773774 4861 generic.go:334] "Generic (PLEG): container finished" podID="1964c30c-6348-4095-8813-8a2241cd9c2d" containerID="07047f7edfdb99885a1801e7ffcd2497206a61ee4649b56c7aea8413a806ead1" exitCode=0 Oct 03 15:02:40 crc kubenswrapper[4861]: I1003 15:02:40.774927 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-4tdkf/crc-debug-bbrb6" event={"ID":"1964c30c-6348-4095-8813-8a2241cd9c2d","Type":"ContainerDied","Data":"07047f7edfdb99885a1801e7ffcd2497206a61ee4649b56c7aea8413a806ead1"} Oct 03 15:02:41 crc kubenswrapper[4861]: I1003 15:02:41.876504 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-4tdkf/crc-debug-bbrb6" Oct 03 15:02:41 crc kubenswrapper[4861]: I1003 15:02:41.997219 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/1964c30c-6348-4095-8813-8a2241cd9c2d-host\") pod \"1964c30c-6348-4095-8813-8a2241cd9c2d\" (UID: \"1964c30c-6348-4095-8813-8a2241cd9c2d\") " Oct 03 15:02:41 crc kubenswrapper[4861]: I1003 15:02:41.997326 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kqjwb\" (UniqueName: \"kubernetes.io/projected/1964c30c-6348-4095-8813-8a2241cd9c2d-kube-api-access-kqjwb\") pod \"1964c30c-6348-4095-8813-8a2241cd9c2d\" (UID: \"1964c30c-6348-4095-8813-8a2241cd9c2d\") " Oct 03 15:02:41 crc kubenswrapper[4861]: I1003 15:02:41.998602 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1964c30c-6348-4095-8813-8a2241cd9c2d-host" (OuterVolumeSpecName: "host") pod "1964c30c-6348-4095-8813-8a2241cd9c2d" (UID: "1964c30c-6348-4095-8813-8a2241cd9c2d"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 03 15:02:42 crc kubenswrapper[4861]: I1003 15:02:42.003133 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1964c30c-6348-4095-8813-8a2241cd9c2d-kube-api-access-kqjwb" (OuterVolumeSpecName: "kube-api-access-kqjwb") pod "1964c30c-6348-4095-8813-8a2241cd9c2d" (UID: "1964c30c-6348-4095-8813-8a2241cd9c2d"). 
InnerVolumeSpecName "kube-api-access-kqjwb". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:02:42 crc kubenswrapper[4861]: I1003 15:02:42.098949 4861 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/1964c30c-6348-4095-8813-8a2241cd9c2d-host\") on node \"crc\" DevicePath \"\"" Oct 03 15:02:42 crc kubenswrapper[4861]: I1003 15:02:42.098991 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kqjwb\" (UniqueName: \"kubernetes.io/projected/1964c30c-6348-4095-8813-8a2241cd9c2d-kube-api-access-kqjwb\") on node \"crc\" DevicePath \"\"" Oct 03 15:02:42 crc kubenswrapper[4861]: I1003 15:02:42.791695 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-4tdkf/crc-debug-bbrb6" event={"ID":"1964c30c-6348-4095-8813-8a2241cd9c2d","Type":"ContainerDied","Data":"f8c712864aede49b7827756451d252d22674166bb4a42b33f3a691846f7592be"} Oct 03 15:02:42 crc kubenswrapper[4861]: I1003 15:02:42.791964 4861 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f8c712864aede49b7827756451d252d22674166bb4a42b33f3a691846f7592be" Oct 03 15:02:42 crc kubenswrapper[4861]: I1003 15:02:42.791747 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-4tdkf/crc-debug-bbrb6" Oct 03 15:02:47 crc kubenswrapper[4861]: I1003 15:02:47.683631 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-4tdkf/crc-debug-bbrb6"] Oct 03 15:02:47 crc kubenswrapper[4861]: I1003 15:02:47.690249 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-4tdkf/crc-debug-bbrb6"] Oct 03 15:02:48 crc kubenswrapper[4861]: I1003 15:02:48.699123 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1964c30c-6348-4095-8813-8a2241cd9c2d" path="/var/lib/kubelet/pods/1964c30c-6348-4095-8813-8a2241cd9c2d/volumes" Oct 03 15:02:48 crc kubenswrapper[4861]: I1003 15:02:48.923341 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-4tdkf/crc-debug-gnjqf"] Oct 03 15:02:48 crc kubenswrapper[4861]: E1003 15:02:48.923757 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1964c30c-6348-4095-8813-8a2241cd9c2d" containerName="container-00" Oct 03 15:02:48 crc kubenswrapper[4861]: I1003 15:02:48.923778 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="1964c30c-6348-4095-8813-8a2241cd9c2d" containerName="container-00" Oct 03 15:02:48 crc kubenswrapper[4861]: I1003 15:02:48.924063 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="1964c30c-6348-4095-8813-8a2241cd9c2d" containerName="container-00" Oct 03 15:02:48 crc kubenswrapper[4861]: I1003 15:02:48.924793 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-4tdkf/crc-debug-gnjqf" Oct 03 15:02:49 crc kubenswrapper[4861]: I1003 15:02:49.018144 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/d9b1325b-4159-4047-b32e-9a2d53a97f9a-host\") pod \"crc-debug-gnjqf\" (UID: \"d9b1325b-4159-4047-b32e-9a2d53a97f9a\") " pod="openshift-must-gather-4tdkf/crc-debug-gnjqf" Oct 03 15:02:49 crc kubenswrapper[4861]: I1003 15:02:49.018639 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-462bm\" (UniqueName: \"kubernetes.io/projected/d9b1325b-4159-4047-b32e-9a2d53a97f9a-kube-api-access-462bm\") pod \"crc-debug-gnjqf\" (UID: \"d9b1325b-4159-4047-b32e-9a2d53a97f9a\") " pod="openshift-must-gather-4tdkf/crc-debug-gnjqf" Oct 03 15:02:49 crc kubenswrapper[4861]: I1003 15:02:49.120334 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-462bm\" (UniqueName: \"kubernetes.io/projected/d9b1325b-4159-4047-b32e-9a2d53a97f9a-kube-api-access-462bm\") pod \"crc-debug-gnjqf\" (UID: \"d9b1325b-4159-4047-b32e-9a2d53a97f9a\") " pod="openshift-must-gather-4tdkf/crc-debug-gnjqf" Oct 03 15:02:49 crc kubenswrapper[4861]: I1003 15:02:49.120485 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/d9b1325b-4159-4047-b32e-9a2d53a97f9a-host\") pod \"crc-debug-gnjqf\" (UID: \"d9b1325b-4159-4047-b32e-9a2d53a97f9a\") " pod="openshift-must-gather-4tdkf/crc-debug-gnjqf" Oct 03 15:02:49 crc kubenswrapper[4861]: I1003 15:02:49.120660 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/d9b1325b-4159-4047-b32e-9a2d53a97f9a-host\") pod \"crc-debug-gnjqf\" (UID: \"d9b1325b-4159-4047-b32e-9a2d53a97f9a\") " pod="openshift-must-gather-4tdkf/crc-debug-gnjqf" Oct 03 15:02:49 crc kubenswrapper[4861]: I1003 15:02:49.161647 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-462bm\" (UniqueName: \"kubernetes.io/projected/d9b1325b-4159-4047-b32e-9a2d53a97f9a-kube-api-access-462bm\") pod \"crc-debug-gnjqf\" (UID: \"d9b1325b-4159-4047-b32e-9a2d53a97f9a\") " pod="openshift-must-gather-4tdkf/crc-debug-gnjqf" Oct 03 15:02:49 crc kubenswrapper[4861]: I1003 15:02:49.247988 4861 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-4tdkf/crc-debug-gnjqf" Oct 03 15:02:49 crc kubenswrapper[4861]: I1003 15:02:49.860693 4861 generic.go:334] "Generic (PLEG): container finished" podID="d9b1325b-4159-4047-b32e-9a2d53a97f9a" containerID="e4cd30f0ea2bbc49cbb770b59d89aee97c35753c11a3857ed95d2eec2a531689" exitCode=0 Oct 03 15:02:49 crc kubenswrapper[4861]: I1003 15:02:49.860835 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-4tdkf/crc-debug-gnjqf" event={"ID":"d9b1325b-4159-4047-b32e-9a2d53a97f9a","Type":"ContainerDied","Data":"e4cd30f0ea2bbc49cbb770b59d89aee97c35753c11a3857ed95d2eec2a531689"} Oct 03 15:02:49 crc kubenswrapper[4861]: I1003 15:02:49.861284 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-4tdkf/crc-debug-gnjqf" event={"ID":"d9b1325b-4159-4047-b32e-9a2d53a97f9a","Type":"ContainerStarted","Data":"901853f8ce6c8b424e782ae08f41dcb299af75d8cadd389881c248f8cb04f0fc"} Oct 03 15:02:49 crc kubenswrapper[4861]: I1003 15:02:49.927487 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-4tdkf/crc-debug-gnjqf"] Oct 03 15:02:49 crc kubenswrapper[4861]: I1003 15:02:49.934553 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-4tdkf/crc-debug-gnjqf"] Oct 03 15:02:50 crc kubenswrapper[4861]: I1003 15:02:50.983757 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-4tdkf/crc-debug-gnjqf" Oct 03 15:02:51 crc kubenswrapper[4861]: I1003 15:02:51.158575 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/d9b1325b-4159-4047-b32e-9a2d53a97f9a-host\") pod \"d9b1325b-4159-4047-b32e-9a2d53a97f9a\" (UID: \"d9b1325b-4159-4047-b32e-9a2d53a97f9a\") " Oct 03 15:02:51 crc kubenswrapper[4861]: I1003 15:02:51.158639 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-462bm\" (UniqueName: \"kubernetes.io/projected/d9b1325b-4159-4047-b32e-9a2d53a97f9a-kube-api-access-462bm\") pod \"d9b1325b-4159-4047-b32e-9a2d53a97f9a\" (UID: \"d9b1325b-4159-4047-b32e-9a2d53a97f9a\") " Oct 03 15:02:51 crc kubenswrapper[4861]: I1003 15:02:51.158715 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d9b1325b-4159-4047-b32e-9a2d53a97f9a-host" (OuterVolumeSpecName: "host") pod "d9b1325b-4159-4047-b32e-9a2d53a97f9a" (UID: "d9b1325b-4159-4047-b32e-9a2d53a97f9a"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 03 15:02:51 crc kubenswrapper[4861]: I1003 15:02:51.159023 4861 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/d9b1325b-4159-4047-b32e-9a2d53a97f9a-host\") on node \"crc\" DevicePath \"\"" Oct 03 15:02:51 crc kubenswrapper[4861]: I1003 15:02:51.164053 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d9b1325b-4159-4047-b32e-9a2d53a97f9a-kube-api-access-462bm" (OuterVolumeSpecName: "kube-api-access-462bm") pod "d9b1325b-4159-4047-b32e-9a2d53a97f9a" (UID: "d9b1325b-4159-4047-b32e-9a2d53a97f9a"). InnerVolumeSpecName "kube-api-access-462bm". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:02:51 crc kubenswrapper[4861]: I1003 15:02:51.261152 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-462bm\" (UniqueName: \"kubernetes.io/projected/d9b1325b-4159-4047-b32e-9a2d53a97f9a-kube-api-access-462bm\") on node \"crc\" DevicePath \"\"" Oct 03 15:02:51 crc kubenswrapper[4861]: I1003 15:02:51.612968 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_4bc19491273fb05f10b8b1261ee07db0d4b5cae179a5dad352c99ff354t6br5_439d7722-c1ce-4a61-b781-499e9278b8d5/util/0.log" Oct 03 15:02:51 crc kubenswrapper[4861]: I1003 15:02:51.792514 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_4bc19491273fb05f10b8b1261ee07db0d4b5cae179a5dad352c99ff354t6br5_439d7722-c1ce-4a61-b781-499e9278b8d5/pull/0.log" Oct 03 15:02:51 crc kubenswrapper[4861]: I1003 15:02:51.798111 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_4bc19491273fb05f10b8b1261ee07db0d4b5cae179a5dad352c99ff354t6br5_439d7722-c1ce-4a61-b781-499e9278b8d5/pull/0.log" Oct 03 15:02:51 crc kubenswrapper[4861]: I1003 15:02:51.810631 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_4bc19491273fb05f10b8b1261ee07db0d4b5cae179a5dad352c99ff354t6br5_439d7722-c1ce-4a61-b781-499e9278b8d5/util/0.log" Oct 03 15:02:51 crc kubenswrapper[4861]: I1003 15:02:51.882360 4861 scope.go:117] "RemoveContainer" containerID="e4cd30f0ea2bbc49cbb770b59d89aee97c35753c11a3857ed95d2eec2a531689" Oct 03 15:02:51 crc kubenswrapper[4861]: I1003 15:02:51.882573 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-4tdkf/crc-debug-gnjqf" Oct 03 15:02:51 crc kubenswrapper[4861]: I1003 15:02:51.970802 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_4bc19491273fb05f10b8b1261ee07db0d4b5cae179a5dad352c99ff354t6br5_439d7722-c1ce-4a61-b781-499e9278b8d5/pull/0.log" Oct 03 15:02:52 crc kubenswrapper[4861]: I1003 15:02:52.001961 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_4bc19491273fb05f10b8b1261ee07db0d4b5cae179a5dad352c99ff354t6br5_439d7722-c1ce-4a61-b781-499e9278b8d5/extract/0.log" Oct 03 15:02:52 crc kubenswrapper[4861]: I1003 15:02:52.013353 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_4bc19491273fb05f10b8b1261ee07db0d4b5cae179a5dad352c99ff354t6br5_439d7722-c1ce-4a61-b781-499e9278b8d5/util/0.log" Oct 03 15:02:52 crc kubenswrapper[4861]: I1003 15:02:52.168379 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-6c675fb79f-vjdcg_b7d80f0f-8c96-446e-a31e-90913d19d661/kube-rbac-proxy/0.log" Oct 03 15:02:52 crc kubenswrapper[4861]: I1003 15:02:52.223851 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-6c675fb79f-vjdcg_b7d80f0f-8c96-446e-a31e-90913d19d661/manager/0.log" Oct 03 15:02:52 crc kubenswrapper[4861]: I1003 15:02:52.269043 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-79d68d6c85-sfz28_a4bdc7e9-1988-4650-8f1c-2d5d8a71b4cc/kube-rbac-proxy/0.log" Oct 03 15:02:52 crc kubenswrapper[4861]: I1003 15:02:52.388941 4861 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-79d68d6c85-sfz28_a4bdc7e9-1988-4650-8f1c-2d5d8a71b4cc/manager/0.log" Oct 03 15:02:52 crc kubenswrapper[4861]: I1003 15:02:52.478930 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-75dfd9b554-4nvdf_24be9f7b-3c61-4434-8863-b3b5d9e5ee2a/kube-rbac-proxy/0.log" Oct 03 15:02:52 crc kubenswrapper[4861]: I1003 15:02:52.498950 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-75dfd9b554-4nvdf_24be9f7b-3c61-4434-8863-b3b5d9e5ee2a/manager/0.log" Oct 03 15:02:52 crc kubenswrapper[4861]: I1003 15:02:52.617436 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-846dff85b5-qkqbk_5a3fecb8-5f79-4f05-9169-7d5cf9072f2c/kube-rbac-proxy/0.log" Oct 03 15:02:52 crc kubenswrapper[4861]: I1003 15:02:52.690874 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d9b1325b-4159-4047-b32e-9a2d53a97f9a" path="/var/lib/kubelet/pods/d9b1325b-4159-4047-b32e-9a2d53a97f9a/volumes" Oct 03 15:02:52 crc kubenswrapper[4861]: I1003 15:02:52.710338 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-846dff85b5-qkqbk_5a3fecb8-5f79-4f05-9169-7d5cf9072f2c/manager/0.log" Oct 03 15:02:52 crc kubenswrapper[4861]: I1003 15:02:52.783420 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-599898f689-ch9nw_20ad49e2-5077-43e2-8671-58457cf10432/kube-rbac-proxy/0.log" Oct 03 15:02:52 crc kubenswrapper[4861]: I1003 15:02:52.835438 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-599898f689-ch9nw_20ad49e2-5077-43e2-8671-58457cf10432/manager/0.log" Oct 03 15:02:52 crc kubenswrapper[4861]: I1003 15:02:52.916207 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-6769b867d9-qcfrn_c3e0bf46-a854-4e79-a1d4-4cb5d9c5eaf1/kube-rbac-proxy/0.log" Oct 03 15:02:53 crc kubenswrapper[4861]: I1003 15:02:53.010726 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-6769b867d9-qcfrn_c3e0bf46-a854-4e79-a1d4-4cb5d9c5eaf1/manager/0.log" Oct 03 15:02:53 crc kubenswrapper[4861]: I1003 15:02:53.121559 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-5fbf469cd7-8dttf_567dc82d-835f-4cf9-805d-a3d65c82b823/kube-rbac-proxy/0.log" Oct 03 15:02:53 crc kubenswrapper[4861]: I1003 15:02:53.237146 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-5fbf469cd7-8dttf_567dc82d-835f-4cf9-805d-a3d65c82b823/manager/0.log" Oct 03 15:02:53 crc kubenswrapper[4861]: I1003 15:02:53.325063 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-84bc9db6cc-mlqw7_cb92f20f-a3f0-42b3-ae87-11e0215c62fb/kube-rbac-proxy/0.log" Oct 03 15:02:53 crc kubenswrapper[4861]: I1003 15:02:53.327265 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-84bc9db6cc-mlqw7_cb92f20f-a3f0-42b3-ae87-11e0215c62fb/manager/0.log" Oct 03 15:02:53 crc kubenswrapper[4861]: I1003 15:02:53.464532 4861 log.go:25] "Finished parsing 
log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7f55849f88-k4wkd_685fbda0-ab44-4f3c-8614-d87234d29d2f/kube-rbac-proxy/0.log" Oct 03 15:02:53 crc kubenswrapper[4861]: I1003 15:02:53.533352 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7f55849f88-k4wkd_685fbda0-ab44-4f3c-8614-d87234d29d2f/manager/0.log" Oct 03 15:02:53 crc kubenswrapper[4861]: I1003 15:02:53.609744 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-6fd6854b49-x5mwn_c7d483ab-e555-49c8-93c9-8bb99928605a/kube-rbac-proxy/0.log" Oct 03 15:02:53 crc kubenswrapper[4861]: I1003 15:02:53.693097 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-6fd6854b49-x5mwn_c7d483ab-e555-49c8-93c9-8bb99928605a/manager/0.log" Oct 03 15:02:53 crc kubenswrapper[4861]: I1003 15:02:53.742274 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-5c468bf4d4-rx55k_9bf321cf-e938-46ed-b8b9-01418f85de45/kube-rbac-proxy/0.log" Oct 03 15:02:53 crc kubenswrapper[4861]: I1003 15:02:53.832123 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-5c468bf4d4-rx55k_9bf321cf-e938-46ed-b8b9-01418f85de45/manager/0.log" Oct 03 15:02:53 crc kubenswrapper[4861]: I1003 15:02:53.930677 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-6574bf987d-6qwsn_57962592-42ae-47a9-a651-7c5d0e3ffad5/kube-rbac-proxy/0.log" Oct 03 15:02:54 crc kubenswrapper[4861]: I1003 15:02:54.014396 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-6574bf987d-6qwsn_57962592-42ae-47a9-a651-7c5d0e3ffad5/manager/0.log" Oct 03 15:02:54 crc kubenswrapper[4861]: I1003 15:02:54.090318 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-555c7456bd-v26h5_fdf89986-9a3f-4f07-b0ad-fb900a6e2fd8/kube-rbac-proxy/0.log" Oct 03 15:02:54 crc kubenswrapper[4861]: I1003 15:02:54.217216 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-555c7456bd-v26h5_fdf89986-9a3f-4f07-b0ad-fb900a6e2fd8/manager/0.log" Oct 03 15:02:54 crc kubenswrapper[4861]: I1003 15:02:54.296971 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-59d6cfdf45-qlrdc_16695342-b32b-4303-b248-d616d2ab9676/kube-rbac-proxy/0.log" Oct 03 15:02:54 crc kubenswrapper[4861]: I1003 15:02:54.336199 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-59d6cfdf45-qlrdc_16695342-b32b-4303-b248-d616d2ab9676/manager/0.log" Oct 03 15:02:54 crc kubenswrapper[4861]: I1003 15:02:54.477903 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-6f64c4d678f4mvp_274f0db6-b0be-41da-a6ca-47160736f8e8/kube-rbac-proxy/0.log" Oct 03 15:02:54 crc kubenswrapper[4861]: I1003 15:02:54.495756 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-6f64c4d678f4mvp_274f0db6-b0be-41da-a6ca-47160736f8e8/manager/0.log" Oct 03 15:02:54 crc kubenswrapper[4861]: I1003 
15:02:54.621404 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-8fd589ff7-d8tns_ea16cb5e-459c-4ad2-9579-17bd88783158/kube-rbac-proxy/0.log" Oct 03 15:02:54 crc kubenswrapper[4861]: I1003 15:02:54.797453 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-5b7969687c-wz6wm_49f1e7a1-ab0a-46b2-97c6-a069d913657d/kube-rbac-proxy/0.log" Oct 03 15:02:55 crc kubenswrapper[4861]: I1003 15:02:55.054140 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-5b7969687c-wz6wm_49f1e7a1-ab0a-46b2-97c6-a069d913657d/operator/0.log" Oct 03 15:02:55 crc kubenswrapper[4861]: I1003 15:02:55.108737 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-tkc8v_48467041-0fb2-4032-a831-11500776f212/registry-server/0.log" Oct 03 15:02:55 crc kubenswrapper[4861]: I1003 15:02:55.417859 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-688db7b6c7-xgq77_5f7b92ed-6113-4c12-a8ec-25589c15dd32/kube-rbac-proxy/0.log" Oct 03 15:02:55 crc kubenswrapper[4861]: I1003 15:02:55.444508 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-688db7b6c7-xgq77_5f7b92ed-6113-4c12-a8ec-25589c15dd32/manager/0.log" Oct 03 15:02:55 crc kubenswrapper[4861]: I1003 15:02:55.587760 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-7d8bb7f44c-5tm2n_92f8bc55-a8b1-41dd-9490-12c2280106ed/kube-rbac-proxy/0.log" Oct 03 15:02:55 crc kubenswrapper[4861]: I1003 15:02:55.631896 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-7d8bb7f44c-5tm2n_92f8bc55-a8b1-41dd-9490-12c2280106ed/manager/0.log" Oct 03 15:02:55 crc kubenswrapper[4861]: I1003 15:02:55.805318 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-8fd589ff7-d8tns_ea16cb5e-459c-4ad2-9579-17bd88783158/manager/0.log" Oct 03 15:02:55 crc kubenswrapper[4861]: I1003 15:02:55.844191 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-5f97d8c699-x75vq_68cf4faf-6f3d-4dfe-9a86-22a803baf77c/operator/0.log" Oct 03 15:02:55 crc kubenswrapper[4861]: I1003 15:02:55.918651 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-6859f9b676-9wwkj_1fa5571a-b9b5-4395-aa7a-a32a670f8e92/kube-rbac-proxy/0.log" Oct 03 15:02:55 crc kubenswrapper[4861]: I1003 15:02:55.972754 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-6859f9b676-9wwkj_1fa5571a-b9b5-4395-aa7a-a32a670f8e92/manager/0.log" Oct 03 15:02:56 crc kubenswrapper[4861]: I1003 15:02:56.046494 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-5db5cf686f-5tfpv_682b0ab4-202c-4455-872c-715e9e6c4ee1/kube-rbac-proxy/0.log" Oct 03 15:02:56 crc kubenswrapper[4861]: I1003 15:02:56.170334 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-5db5cf686f-5tfpv_682b0ab4-202c-4455-872c-715e9e6c4ee1/manager/0.log" Oct 03 15:02:56 crc 
kubenswrapper[4861]: I1003 15:02:56.187442 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5cd5cb47d7-stsgz_896120d6-4995-4fd4-a238-4b34c6128326/kube-rbac-proxy/0.log" Oct 03 15:02:56 crc kubenswrapper[4861]: I1003 15:02:56.243553 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5cd5cb47d7-stsgz_896120d6-4995-4fd4-a238-4b34c6128326/manager/0.log" Oct 03 15:02:56 crc kubenswrapper[4861]: I1003 15:02:56.343114 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-fcd7d9895-mwpqn_0ab6f3ab-52bf-404a-8102-195683e803e8/kube-rbac-proxy/0.log" Oct 03 15:02:56 crc kubenswrapper[4861]: I1003 15:02:56.406002 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-fcd7d9895-mwpqn_0ab6f3ab-52bf-404a-8102-195683e803e8/manager/0.log" Oct 03 15:03:12 crc kubenswrapper[4861]: I1003 15:03:12.977714 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-fcq84_876d6207-8976-4d02-887b-b431a4821eab/control-plane-machine-set-operator/0.log" Oct 03 15:03:13 crc kubenswrapper[4861]: I1003 15:03:13.108472 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-rg4gb_04154cd6-a67c-42d3-bbb0-951c4986390d/kube-rbac-proxy/0.log" Oct 03 15:03:13 crc kubenswrapper[4861]: I1003 15:03:13.171338 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-rg4gb_04154cd6-a67c-42d3-bbb0-951c4986390d/machine-api-operator/0.log" Oct 03 15:03:25 crc kubenswrapper[4861]: I1003 15:03:25.726376 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-mpwfn_f5edb507-a930-4aac-b964-575b85ab8676/cert-manager-controller/0.log" Oct 03 15:03:25 crc kubenswrapper[4861]: I1003 15:03:25.887203 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-872cz_16b81380-ffaa-4755-a4d0-86c03eb2094f/cert-manager-webhook/0.log" Oct 03 15:03:25 crc kubenswrapper[4861]: I1003 15:03:25.931613 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-fkrcs_10cb85f1-4e24-4793-b1d0-8c3b11ceb85c/cert-manager-cainjector/0.log" Oct 03 15:03:38 crc kubenswrapper[4861]: I1003 15:03:38.113357 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-6b874cbd85-jmr9f_767a82d2-0bea-436b-b63a-c5bbf0de86b8/nmstate-console-plugin/0.log" Oct 03 15:03:38 crc kubenswrapper[4861]: I1003 15:03:38.278815 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-thvxb_c1ec679d-6458-489b-bdb3-6c6ec465d695/nmstate-handler/0.log" Oct 03 15:03:38 crc kubenswrapper[4861]: I1003 15:03:38.332186 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-fdff9cb8d-b4mxl_74a61ffa-6414-4072-952f-d3a9e5df2cad/kube-rbac-proxy/0.log" Oct 03 15:03:38 crc kubenswrapper[4861]: I1003 15:03:38.371018 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-fdff9cb8d-b4mxl_74a61ffa-6414-4072-952f-d3a9e5df2cad/nmstate-metrics/0.log" Oct 03 15:03:38 crc kubenswrapper[4861]: I1003 15:03:38.567538 4861 log.go:25] "Finished parsing 
log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-6cdbc54649-v6q9r_cecb92af-dba6-4e7b-825c-97d7fcd5cc5d/nmstate-webhook/0.log" Oct 03 15:03:38 crc kubenswrapper[4861]: I1003 15:03:38.585132 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-858ddd8f98-kxqsn_28617d69-b62d-41a8-bb48-d89be9a37676/nmstate-operator/0.log" Oct 03 15:03:52 crc kubenswrapper[4861]: I1003 15:03:52.740870 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-68d546b9d8-cm757_6aca0805-4feb-4b13-8b46-f41858176496/kube-rbac-proxy/0.log" Oct 03 15:03:52 crc kubenswrapper[4861]: I1003 15:03:52.864205 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-68d546b9d8-cm757_6aca0805-4feb-4b13-8b46-f41858176496/controller/0.log" Oct 03 15:03:52 crc kubenswrapper[4861]: I1003 15:03:52.991541 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-64bf5d555-jn69p_9480abea-a1f1-4416-880b-8fb72fd8716b/frr-k8s-webhook-server/0.log" Oct 03 15:03:53 crc kubenswrapper[4861]: I1003 15:03:53.033658 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xxhsl_f3324863-98d8-44d0-91dc-4bb8dc243a8f/cp-frr-files/0.log" Oct 03 15:03:53 crc kubenswrapper[4861]: I1003 15:03:53.244741 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xxhsl_f3324863-98d8-44d0-91dc-4bb8dc243a8f/cp-frr-files/0.log" Oct 03 15:03:53 crc kubenswrapper[4861]: I1003 15:03:53.248652 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xxhsl_f3324863-98d8-44d0-91dc-4bb8dc243a8f/cp-metrics/0.log" Oct 03 15:03:53 crc kubenswrapper[4861]: I1003 15:03:53.252218 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xxhsl_f3324863-98d8-44d0-91dc-4bb8dc243a8f/cp-reloader/0.log" Oct 03 15:03:53 crc kubenswrapper[4861]: I1003 15:03:53.323033 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xxhsl_f3324863-98d8-44d0-91dc-4bb8dc243a8f/cp-reloader/0.log" Oct 03 15:03:53 crc kubenswrapper[4861]: I1003 15:03:53.495456 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xxhsl_f3324863-98d8-44d0-91dc-4bb8dc243a8f/cp-reloader/0.log" Oct 03 15:03:53 crc kubenswrapper[4861]: I1003 15:03:53.500354 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xxhsl_f3324863-98d8-44d0-91dc-4bb8dc243a8f/cp-metrics/0.log" Oct 03 15:03:53 crc kubenswrapper[4861]: I1003 15:03:53.501474 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xxhsl_f3324863-98d8-44d0-91dc-4bb8dc243a8f/cp-frr-files/0.log" Oct 03 15:03:53 crc kubenswrapper[4861]: I1003 15:03:53.554795 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xxhsl_f3324863-98d8-44d0-91dc-4bb8dc243a8f/cp-metrics/0.log" Oct 03 15:03:53 crc kubenswrapper[4861]: I1003 15:03:53.698907 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xxhsl_f3324863-98d8-44d0-91dc-4bb8dc243a8f/cp-frr-files/0.log" Oct 03 15:03:53 crc kubenswrapper[4861]: I1003 15:03:53.700668 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xxhsl_f3324863-98d8-44d0-91dc-4bb8dc243a8f/cp-reloader/0.log" Oct 03 15:03:53 crc kubenswrapper[4861]: I1003 15:03:53.730344 4861 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_frr-k8s-xxhsl_f3324863-98d8-44d0-91dc-4bb8dc243a8f/cp-metrics/0.log" Oct 03 15:03:53 crc kubenswrapper[4861]: I1003 15:03:53.755013 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xxhsl_f3324863-98d8-44d0-91dc-4bb8dc243a8f/controller/0.log" Oct 03 15:03:53 crc kubenswrapper[4861]: I1003 15:03:53.896404 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xxhsl_f3324863-98d8-44d0-91dc-4bb8dc243a8f/frr-metrics/0.log" Oct 03 15:03:53 crc kubenswrapper[4861]: I1003 15:03:53.963738 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xxhsl_f3324863-98d8-44d0-91dc-4bb8dc243a8f/kube-rbac-proxy-frr/0.log" Oct 03 15:03:53 crc kubenswrapper[4861]: I1003 15:03:53.970415 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xxhsl_f3324863-98d8-44d0-91dc-4bb8dc243a8f/kube-rbac-proxy/0.log" Oct 03 15:03:54 crc kubenswrapper[4861]: I1003 15:03:54.208267 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xxhsl_f3324863-98d8-44d0-91dc-4bb8dc243a8f/reloader/0.log" Oct 03 15:03:54 crc kubenswrapper[4861]: I1003 15:03:54.295010 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-5dbf4fd78b-2d6hn_03b31621-6432-424e-a55f-aecaf846b082/manager/0.log" Oct 03 15:03:54 crc kubenswrapper[4861]: I1003 15:03:54.514592 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-5b7c788d54-q5fg6_f8fb711a-85c6-4014-8f23-7edabc8faf74/webhook-server/0.log" Oct 03 15:03:54 crc kubenswrapper[4861]: I1003 15:03:54.833164 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-vdx27_6d48b95e-8866-4456-95c0-4c3b728f6f93/kube-rbac-proxy/0.log" Oct 03 15:03:55 crc kubenswrapper[4861]: I1003 15:03:55.288163 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-vdx27_6d48b95e-8866-4456-95c0-4c3b728f6f93/speaker/0.log" Oct 03 15:03:55 crc kubenswrapper[4861]: I1003 15:03:55.392721 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xxhsl_f3324863-98d8-44d0-91dc-4bb8dc243a8f/frr/0.log" Oct 03 15:04:08 crc kubenswrapper[4861]: I1003 15:04:08.210552 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2xk9jt_b2677784-5297-4d5b-8558-e904b9668fa5/util/0.log" Oct 03 15:04:08 crc kubenswrapper[4861]: I1003 15:04:08.357811 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2xk9jt_b2677784-5297-4d5b-8558-e904b9668fa5/util/0.log" Oct 03 15:04:08 crc kubenswrapper[4861]: I1003 15:04:08.431415 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2xk9jt_b2677784-5297-4d5b-8558-e904b9668fa5/pull/0.log" Oct 03 15:04:08 crc kubenswrapper[4861]: I1003 15:04:08.449131 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2xk9jt_b2677784-5297-4d5b-8558-e904b9668fa5/pull/0.log" Oct 03 15:04:08 crc kubenswrapper[4861]: I1003 15:04:08.610061 4861 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2xk9jt_b2677784-5297-4d5b-8558-e904b9668fa5/util/0.log" Oct 03 15:04:08 crc kubenswrapper[4861]: I1003 15:04:08.632857 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2xk9jt_b2677784-5297-4d5b-8558-e904b9668fa5/extract/0.log" Oct 03 15:04:08 crc kubenswrapper[4861]: I1003 15:04:08.658126 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2xk9jt_b2677784-5297-4d5b-8558-e904b9668fa5/pull/0.log" Oct 03 15:04:08 crc kubenswrapper[4861]: I1003 15:04:08.787468 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-c5wgm_fe2ed840-3dd5-4276-b039-eec14967f4ee/extract-utilities/0.log" Oct 03 15:04:08 crc kubenswrapper[4861]: I1003 15:04:08.954935 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-c5wgm_fe2ed840-3dd5-4276-b039-eec14967f4ee/extract-content/0.log" Oct 03 15:04:08 crc kubenswrapper[4861]: I1003 15:04:08.978409 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-c5wgm_fe2ed840-3dd5-4276-b039-eec14967f4ee/extract-content/0.log" Oct 03 15:04:09 crc kubenswrapper[4861]: I1003 15:04:09.006363 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-c5wgm_fe2ed840-3dd5-4276-b039-eec14967f4ee/extract-utilities/0.log" Oct 03 15:04:09 crc kubenswrapper[4861]: I1003 15:04:09.190122 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-c5wgm_fe2ed840-3dd5-4276-b039-eec14967f4ee/extract-content/0.log" Oct 03 15:04:09 crc kubenswrapper[4861]: I1003 15:04:09.247832 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-c5wgm_fe2ed840-3dd5-4276-b039-eec14967f4ee/extract-utilities/0.log" Oct 03 15:04:09 crc kubenswrapper[4861]: I1003 15:04:09.525117 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-6q6wr_e67e431c-da77-437f-a3e7-1e6abb3ccc8b/extract-utilities/0.log" Oct 03 15:04:09 crc kubenswrapper[4861]: I1003 15:04:09.725120 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-6q6wr_e67e431c-da77-437f-a3e7-1e6abb3ccc8b/extract-content/0.log" Oct 03 15:04:09 crc kubenswrapper[4861]: I1003 15:04:09.768709 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-6q6wr_e67e431c-da77-437f-a3e7-1e6abb3ccc8b/extract-utilities/0.log" Oct 03 15:04:09 crc kubenswrapper[4861]: I1003 15:04:09.776180 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-c5wgm_fe2ed840-3dd5-4276-b039-eec14967f4ee/registry-server/0.log" Oct 03 15:04:09 crc kubenswrapper[4861]: I1003 15:04:09.839172 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-6q6wr_e67e431c-da77-437f-a3e7-1e6abb3ccc8b/extract-content/0.log" Oct 03 15:04:09 crc kubenswrapper[4861]: I1003 15:04:09.988705 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-6q6wr_e67e431c-da77-437f-a3e7-1e6abb3ccc8b/extract-utilities/0.log" Oct 03 15:04:10 crc kubenswrapper[4861]: 
I1003 15:04:10.045485 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-6q6wr_e67e431c-da77-437f-a3e7-1e6abb3ccc8b/extract-content/0.log" Oct 03 15:04:10 crc kubenswrapper[4861]: I1003 15:04:10.289034 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ccx2jv_297c9b28-92e3-4c8e-bb08-63af1b637718/util/0.log" Oct 03 15:04:10 crc kubenswrapper[4861]: I1003 15:04:10.653272 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ccx2jv_297c9b28-92e3-4c8e-bb08-63af1b637718/pull/0.log" Oct 03 15:04:10 crc kubenswrapper[4861]: I1003 15:04:10.661670 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-6q6wr_e67e431c-da77-437f-a3e7-1e6abb3ccc8b/registry-server/0.log" Oct 03 15:04:10 crc kubenswrapper[4861]: I1003 15:04:10.674109 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ccx2jv_297c9b28-92e3-4c8e-bb08-63af1b637718/util/0.log" Oct 03 15:04:10 crc kubenswrapper[4861]: I1003 15:04:10.716686 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ccx2jv_297c9b28-92e3-4c8e-bb08-63af1b637718/pull/0.log" Oct 03 15:04:10 crc kubenswrapper[4861]: I1003 15:04:10.882367 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ccx2jv_297c9b28-92e3-4c8e-bb08-63af1b637718/util/0.log" Oct 03 15:04:10 crc kubenswrapper[4861]: I1003 15:04:10.894366 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ccx2jv_297c9b28-92e3-4c8e-bb08-63af1b637718/extract/0.log" Oct 03 15:04:10 crc kubenswrapper[4861]: I1003 15:04:10.916678 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ccx2jv_297c9b28-92e3-4c8e-bb08-63af1b637718/pull/0.log" Oct 03 15:04:11 crc kubenswrapper[4861]: I1003 15:04:11.117118 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-p5qrs_6d1c8721-e495-45da-8947-09c44940673d/marketplace-operator/0.log" Oct 03 15:04:11 crc kubenswrapper[4861]: I1003 15:04:11.178743 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-9nrfv_cded624e-5d0b-45da-9e1f-de6def114c00/extract-utilities/0.log" Oct 03 15:04:11 crc kubenswrapper[4861]: I1003 15:04:11.318073 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-9nrfv_cded624e-5d0b-45da-9e1f-de6def114c00/extract-utilities/0.log" Oct 03 15:04:11 crc kubenswrapper[4861]: I1003 15:04:11.339341 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-9nrfv_cded624e-5d0b-45da-9e1f-de6def114c00/extract-content/0.log" Oct 03 15:04:11 crc kubenswrapper[4861]: I1003 15:04:11.443655 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-9nrfv_cded624e-5d0b-45da-9e1f-de6def114c00/extract-content/0.log" Oct 03 15:04:11 crc kubenswrapper[4861]: I1003 15:04:11.583971 4861 log.go:25] "Finished parsing 
log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-9nrfv_cded624e-5d0b-45da-9e1f-de6def114c00/extract-utilities/0.log" Oct 03 15:04:11 crc kubenswrapper[4861]: I1003 15:04:11.644369 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-9nrfv_cded624e-5d0b-45da-9e1f-de6def114c00/extract-content/0.log" Oct 03 15:04:11 crc kubenswrapper[4861]: I1003 15:04:11.785933 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-9nrfv_cded624e-5d0b-45da-9e1f-de6def114c00/registry-server/0.log" Oct 03 15:04:11 crc kubenswrapper[4861]: I1003 15:04:11.872256 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-vl5pp_d2c62b5f-5f8c-4245-90a3-fb06846e063b/extract-utilities/0.log" Oct 03 15:04:12 crc kubenswrapper[4861]: I1003 15:04:12.034554 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-vl5pp_d2c62b5f-5f8c-4245-90a3-fb06846e063b/extract-content/0.log" Oct 03 15:04:12 crc kubenswrapper[4861]: I1003 15:04:12.060081 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-vl5pp_d2c62b5f-5f8c-4245-90a3-fb06846e063b/extract-content/0.log" Oct 03 15:04:12 crc kubenswrapper[4861]: I1003 15:04:12.081996 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-vl5pp_d2c62b5f-5f8c-4245-90a3-fb06846e063b/extract-utilities/0.log" Oct 03 15:04:12 crc kubenswrapper[4861]: I1003 15:04:12.245592 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-vl5pp_d2c62b5f-5f8c-4245-90a3-fb06846e063b/extract-content/0.log" Oct 03 15:04:12 crc kubenswrapper[4861]: I1003 15:04:12.255900 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-vl5pp_d2c62b5f-5f8c-4245-90a3-fb06846e063b/extract-utilities/0.log" Oct 03 15:04:12 crc kubenswrapper[4861]: I1003 15:04:12.957722 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-vl5pp_d2c62b5f-5f8c-4245-90a3-fb06846e063b/registry-server/0.log" Oct 03 15:04:30 crc kubenswrapper[4861]: I1003 15:04:30.145428 4861 patch_prober.go:28] interesting pod/machine-config-daemon-t9slw container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 15:04:30 crc kubenswrapper[4861]: I1003 15:04:30.146008 4861 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 15:04:48 crc kubenswrapper[4861]: E1003 15:04:48.465099 4861 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.129.56.28:37476->38.129.56.28:42013: write tcp 38.129.56.28:37476->38.129.56.28:42013: write: broken pipe Oct 03 15:05:00 crc kubenswrapper[4861]: I1003 15:05:00.145413 4861 patch_prober.go:28] interesting pod/machine-config-daemon-t9slw container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: 
Oct 03 15:05:00 crc kubenswrapper[4861]: I1003 15:05:00.145413 4861 patch_prober.go:28] interesting pod/machine-config-daemon-t9slw container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 03 15:05:00 crc kubenswrapper[4861]: I1003 15:05:00.145914 4861 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 03 15:05:30 crc kubenswrapper[4861]: I1003 15:05:30.144854 4861 patch_prober.go:28] interesting pod/machine-config-daemon-t9slw container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 03 15:05:30 crc kubenswrapper[4861]: I1003 15:05:30.145669 4861 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 03 15:05:30 crc kubenswrapper[4861]: I1003 15:05:30.145733 4861 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-t9slw"
Oct 03 15:05:30 crc kubenswrapper[4861]: I1003 15:05:30.146831 4861 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"9558777aae4f5f6fe07fa47c538bc467cb19e05037fc658a37aed876cda97ecd"} pod="openshift-machine-config-operator/machine-config-daemon-t9slw" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Oct 03 15:05:30 crc kubenswrapper[4861]: I1003 15:05:30.146943 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" containerName="machine-config-daemon" containerID="cri-o://9558777aae4f5f6fe07fa47c538bc467cb19e05037fc658a37aed876cda97ecd" gracePeriod=600
Oct 03 15:05:30 crc kubenswrapper[4861]: E1003 15:05:30.269418 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348"
Oct 03 15:05:30 crc kubenswrapper[4861]: I1003 15:05:30.469175 4861 generic.go:334] "Generic (PLEG): container finished" podID="d8335d3f-417e-4114-b306-a3d8f6c31348" containerID="9558777aae4f5f6fe07fa47c538bc467cb19e05037fc658a37aed876cda97ecd" exitCode=0
Oct 03 15:05:30 crc kubenswrapper[4861]: I1003 15:05:30.469300 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" event={"ID":"d8335d3f-417e-4114-b306-a3d8f6c31348","Type":"ContainerDied","Data":"9558777aae4f5f6fe07fa47c538bc467cb19e05037fc658a37aed876cda97ecd"}
Oct 03 15:05:30 crc kubenswrapper[4861]: I1003 15:05:30.469408 4861 scope.go:117] "RemoveContainer" containerID="77c3015bab5401c3cc6bb663b4e9aeb836cb468891caf7c22966f819ab8330cb"
Oct 03 15:05:30 crc kubenswrapper[4861]: I1003 15:05:30.470494 4861 scope.go:117] "RemoveContainer" containerID="9558777aae4f5f6fe07fa47c538bc467cb19e05037fc658a37aed876cda97ecd"
Oct 03 15:05:30 crc kubenswrapper[4861]: E1003 15:05:30.471605 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348"
Oct 03 15:05:43 crc kubenswrapper[4861]: I1003 15:05:43.681438 4861 scope.go:117] "RemoveContainer" containerID="9558777aae4f5f6fe07fa47c538bc467cb19e05037fc658a37aed876cda97ecd"
Oct 03 15:05:43 crc kubenswrapper[4861]: E1003 15:05:43.682370 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348"
Oct 03 15:05:55 crc kubenswrapper[4861]: I1003 15:05:55.681371 4861 scope.go:117] "RemoveContainer" containerID="9558777aae4f5f6fe07fa47c538bc467cb19e05037fc658a37aed876cda97ecd"
Oct 03 15:05:55 crc kubenswrapper[4861]: E1003 15:05:55.682061 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348"
Oct 03 15:06:10 crc kubenswrapper[4861]: I1003 15:06:10.687802 4861 scope.go:117] "RemoveContainer" containerID="9558777aae4f5f6fe07fa47c538bc467cb19e05037fc658a37aed876cda97ecd"
Oct 03 15:06:10 crc kubenswrapper[4861]: E1003 15:06:10.688753 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348"
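[Annotation] The "back-off 5m0s" in the recurring pod_workers.go errors above means machine-config-daemon has crashed often enough to reach the kubelet's restart-backoff ceiling; the restart delay roughly doubles per crash from a small initial value up to a 5-minute cap, and the RemoveContainer / "Error syncing pod" pairs every 10-15 seconds are periodic pod syncs that re-check and skip while the backoff is still in effect. A minimal sketch of that capped doubling, with the 10s initial delay taken as an assumption, not read from this log:

// backoff_sketch.go - hedged illustration of capped exponential restart backoff.
package main

import (
	"fmt"
	"time"
)

func restartDelay(restarts int) time.Duration {
	delay := 10 * time.Second // assumed initial delay
	for i := 0; i < restarts; i++ {
		delay *= 2
		if delay >= 5*time.Minute {
			return 5 * time.Minute // cap reached: the "back-off 5m0s" above
		}
	}
	return delay
}

func main() {
	for r := 0; r <= 6; r++ {
		fmt.Printf("restart %d -> wait %v\n", r, restartDelay(r))
	}
}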
Oct 03 15:06:21 crc kubenswrapper[4861]: I1003 15:06:21.507102 4861 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-klzvl"]
Oct 03 15:06:21 crc kubenswrapper[4861]: E1003 15:06:21.509630 4861 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d9b1325b-4159-4047-b32e-9a2d53a97f9a" containerName="container-00"
Oct 03 15:06:21 crc kubenswrapper[4861]: I1003 15:06:21.509656 4861 state_mem.go:107] "Deleted CPUSet assignment" podUID="d9b1325b-4159-4047-b32e-9a2d53a97f9a" containerName="container-00"
Oct 03 15:06:21 crc kubenswrapper[4861]: I1003 15:06:21.509894 4861 memory_manager.go:354] "RemoveStaleState removing state" podUID="d9b1325b-4159-4047-b32e-9a2d53a97f9a" containerName="container-00"
Oct 03 15:06:21 crc kubenswrapper[4861]: I1003 15:06:21.511490 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-klzvl"
Oct 03 15:06:21 crc kubenswrapper[4861]: I1003 15:06:21.527811 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-klzvl"]
Oct 03 15:06:21 crc kubenswrapper[4861]: I1003 15:06:21.610937 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/733b8bee-c808-4755-af18-850749e64670-catalog-content\") pod \"redhat-marketplace-klzvl\" (UID: \"733b8bee-c808-4755-af18-850749e64670\") " pod="openshift-marketplace/redhat-marketplace-klzvl"
Oct 03 15:06:21 crc kubenswrapper[4861]: I1003 15:06:21.611056 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-whdpx\" (UniqueName: \"kubernetes.io/projected/733b8bee-c808-4755-af18-850749e64670-kube-api-access-whdpx\") pod \"redhat-marketplace-klzvl\" (UID: \"733b8bee-c808-4755-af18-850749e64670\") " pod="openshift-marketplace/redhat-marketplace-klzvl"
Oct 03 15:06:21 crc kubenswrapper[4861]: I1003 15:06:21.611109 4861 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/733b8bee-c808-4755-af18-850749e64670-utilities\") pod \"redhat-marketplace-klzvl\" (UID: \"733b8bee-c808-4755-af18-850749e64670\") " pod="openshift-marketplace/redhat-marketplace-klzvl"
Oct 03 15:06:21 crc kubenswrapper[4861]: I1003 15:06:21.712472 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/733b8bee-c808-4755-af18-850749e64670-catalog-content\") pod \"redhat-marketplace-klzvl\" (UID: \"733b8bee-c808-4755-af18-850749e64670\") " pod="openshift-marketplace/redhat-marketplace-klzvl"
Oct 03 15:06:21 crc kubenswrapper[4861]: I1003 15:06:21.713032 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/733b8bee-c808-4755-af18-850749e64670-catalog-content\") pod \"redhat-marketplace-klzvl\" (UID: \"733b8bee-c808-4755-af18-850749e64670\") " pod="openshift-marketplace/redhat-marketplace-klzvl"
Oct 03 15:06:21 crc kubenswrapper[4861]: I1003 15:06:21.713287 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-whdpx\" (UniqueName: \"kubernetes.io/projected/733b8bee-c808-4755-af18-850749e64670-kube-api-access-whdpx\") pod \"redhat-marketplace-klzvl\" (UID: \"733b8bee-c808-4755-af18-850749e64670\") " pod="openshift-marketplace/redhat-marketplace-klzvl"
Oct 03 15:06:21 crc kubenswrapper[4861]: I1003 15:06:21.713667 4861 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/733b8bee-c808-4755-af18-850749e64670-utilities\") pod \"redhat-marketplace-klzvl\" (UID: \"733b8bee-c808-4755-af18-850749e64670\") " pod="openshift-marketplace/redhat-marketplace-klzvl"
Oct 03 15:06:21 crc kubenswrapper[4861]: I1003 15:06:21.713989 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/733b8bee-c808-4755-af18-850749e64670-utilities\") pod \"redhat-marketplace-klzvl\" (UID: \"733b8bee-c808-4755-af18-850749e64670\") " pod="openshift-marketplace/redhat-marketplace-klzvl"
Oct 03 15:06:21 crc kubenswrapper[4861]: I1003 15:06:21.736033 4861 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-whdpx\" (UniqueName: \"kubernetes.io/projected/733b8bee-c808-4755-af18-850749e64670-kube-api-access-whdpx\") pod \"redhat-marketplace-klzvl\" (UID: \"733b8bee-c808-4755-af18-850749e64670\") " pod="openshift-marketplace/redhat-marketplace-klzvl"
Oct 03 15:06:21 crc kubenswrapper[4861]: I1003 15:06:21.830975 4861 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-klzvl"
Oct 03 15:06:22 crc kubenswrapper[4861]: I1003 15:06:22.376779 4861 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-klzvl"]
Oct 03 15:06:23 crc kubenswrapper[4861]: I1003 15:06:23.033499 4861 generic.go:334] "Generic (PLEG): container finished" podID="733b8bee-c808-4755-af18-850749e64670" containerID="50b204bfeb4aa1f1d08d08917d6a3191012b0792acf4fcd44cdb91d28ca2daef" exitCode=0
Oct 03 15:06:23 crc kubenswrapper[4861]: I1003 15:06:23.033895 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-klzvl" event={"ID":"733b8bee-c808-4755-af18-850749e64670","Type":"ContainerDied","Data":"50b204bfeb4aa1f1d08d08917d6a3191012b0792acf4fcd44cdb91d28ca2daef"}
Oct 03 15:06:23 crc kubenswrapper[4861]: I1003 15:06:23.033935 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-klzvl" event={"ID":"733b8bee-c808-4755-af18-850749e64670","Type":"ContainerStarted","Data":"5ee2615c4783c26a33131ea5346f370e7ca66ff549d7f6a209a24ff8fe5b967d"}
Oct 03 15:06:23 crc kubenswrapper[4861]: I1003 15:06:23.037085 4861 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Oct 03 15:06:23 crc kubenswrapper[4861]: I1003 15:06:23.681201 4861 scope.go:117] "RemoveContainer" containerID="9558777aae4f5f6fe07fa47c538bc467cb19e05037fc658a37aed876cda97ecd"
Oct 03 15:06:23 crc kubenswrapper[4861]: E1003 15:06:23.681757 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348"
Oct 03 15:06:25 crc kubenswrapper[4861]: I1003 15:06:25.058094 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-klzvl" event={"ID":"733b8bee-c808-4755-af18-850749e64670","Type":"ContainerStarted","Data":"1a6914bb5003d7800cb1accac510297ee901a1f097e5e3f33068664efb460225"}
Oct 03 15:06:26 crc kubenswrapper[4861]: I1003 15:06:26.070551 4861 generic.go:334] "Generic (PLEG): container finished" podID="733b8bee-c808-4755-af18-850749e64670" containerID="1a6914bb5003d7800cb1accac510297ee901a1f097e5e3f33068664efb460225" exitCode=0
Oct 03 15:06:26 crc kubenswrapper[4861]: I1003 15:06:26.070656 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-klzvl" event={"ID":"733b8bee-c808-4755-af18-850749e64670","Type":"ContainerDied","Data":"1a6914bb5003d7800cb1accac510297ee901a1f097e5e3f33068664efb460225"}
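[Annotation] The sequence above is the standard catalog-pod startup flow: the volume reconciler first verifies the desired volumes (two emptyDirs plus a projected service-account token) are attached, then mounts whatever is missing from its actual state, after which the extract containers run to completion before the registry server starts. A much-simplified sketch of that desired-vs-actual reconcile loop; all names here are illustrative, not kubelet APIs:

// reconcile_sketch.go - hedged illustration of a desired-state/actual-state
// volume reconcile loop like the reconciler_common.go entries above.
package main

import "fmt"

// reconcile mounts every desired volume that is not yet in the actual state.
func reconcile(desired []string, actual map[string]bool) {
	for _, vol := range desired {
		if !actual[vol] {
			fmt.Printf("MountVolume started for volume %q\n", vol)
			actual[vol] = true // stands in for MountVolume.SetUp succeeding
		}
	}
}

func main() {
	desired := []string{"catalog-content", "kube-api-access-whdpx", "utilities"}
	reconcile(desired, map[string]bool{})
}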
event={"ID":"733b8bee-c808-4755-af18-850749e64670","Type":"ContainerStarted","Data":"84067b8a1d4b11861cb059be908849f1cc3c3764d8394005efdb0c88cd5f09d8"} Oct 03 15:06:28 crc kubenswrapper[4861]: I1003 15:06:28.130061 4861 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-klzvl" podStartSLOduration=3.007261934 podStartE2EDuration="7.130042562s" podCreationTimestamp="2025-10-03 15:06:21 +0000 UTC" firstStartedPulling="2025-10-03 15:06:23.036723186 +0000 UTC m=+5697.034708263" lastFinishedPulling="2025-10-03 15:06:27.159503844 +0000 UTC m=+5701.157488891" observedRunningTime="2025-10-03 15:06:28.125934511 +0000 UTC m=+5702.123919558" watchObservedRunningTime="2025-10-03 15:06:28.130042562 +0000 UTC m=+5702.128027609" Oct 03 15:06:31 crc kubenswrapper[4861]: I1003 15:06:31.831932 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-klzvl" Oct 03 15:06:31 crc kubenswrapper[4861]: I1003 15:06:31.832538 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-klzvl" Oct 03 15:06:31 crc kubenswrapper[4861]: I1003 15:06:31.917381 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-klzvl" Oct 03 15:06:32 crc kubenswrapper[4861]: I1003 15:06:32.217967 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-klzvl" Oct 03 15:06:36 crc kubenswrapper[4861]: I1003 15:06:36.303487 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-klzvl"] Oct 03 15:06:36 crc kubenswrapper[4861]: I1003 15:06:36.304014 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-klzvl" podUID="733b8bee-c808-4755-af18-850749e64670" containerName="registry-server" containerID="cri-o://84067b8a1d4b11861cb059be908849f1cc3c3764d8394005efdb0c88cd5f09d8" gracePeriod=2 Oct 03 15:06:36 crc kubenswrapper[4861]: I1003 15:06:36.699019 4861 scope.go:117] "RemoveContainer" containerID="9558777aae4f5f6fe07fa47c538bc467cb19e05037fc658a37aed876cda97ecd" Oct 03 15:06:36 crc kubenswrapper[4861]: E1003 15:06:36.699743 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 15:06:36 crc kubenswrapper[4861]: I1003 15:06:36.737446 4861 util.go:48] "No ready sandbox for pod can be found. 
Oct 03 15:06:31 crc kubenswrapper[4861]: I1003 15:06:31.831932 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-klzvl"
Oct 03 15:06:31 crc kubenswrapper[4861]: I1003 15:06:31.832538 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-klzvl"
Oct 03 15:06:31 crc kubenswrapper[4861]: I1003 15:06:31.917381 4861 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-klzvl"
Oct 03 15:06:32 crc kubenswrapper[4861]: I1003 15:06:32.217967 4861 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-klzvl"
Oct 03 15:06:36 crc kubenswrapper[4861]: I1003 15:06:36.303487 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-klzvl"]
Oct 03 15:06:36 crc kubenswrapper[4861]: I1003 15:06:36.304014 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-klzvl" podUID="733b8bee-c808-4755-af18-850749e64670" containerName="registry-server" containerID="cri-o://84067b8a1d4b11861cb059be908849f1cc3c3764d8394005efdb0c88cd5f09d8" gracePeriod=2
Oct 03 15:06:36 crc kubenswrapper[4861]: I1003 15:06:36.699019 4861 scope.go:117] "RemoveContainer" containerID="9558777aae4f5f6fe07fa47c538bc467cb19e05037fc658a37aed876cda97ecd"
Oct 03 15:06:36 crc kubenswrapper[4861]: E1003 15:06:36.699743 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348"
Oct 03 15:06:36 crc kubenswrapper[4861]: I1003 15:06:36.737446 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-klzvl"
Oct 03 15:06:36 crc kubenswrapper[4861]: I1003 15:06:36.827272 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/733b8bee-c808-4755-af18-850749e64670-utilities\") pod \"733b8bee-c808-4755-af18-850749e64670\" (UID: \"733b8bee-c808-4755-af18-850749e64670\") "
Oct 03 15:06:36 crc kubenswrapper[4861]: I1003 15:06:36.827344 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-whdpx\" (UniqueName: \"kubernetes.io/projected/733b8bee-c808-4755-af18-850749e64670-kube-api-access-whdpx\") pod \"733b8bee-c808-4755-af18-850749e64670\" (UID: \"733b8bee-c808-4755-af18-850749e64670\") "
Oct 03 15:06:36 crc kubenswrapper[4861]: I1003 15:06:36.827513 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/733b8bee-c808-4755-af18-850749e64670-catalog-content\") pod \"733b8bee-c808-4755-af18-850749e64670\" (UID: \"733b8bee-c808-4755-af18-850749e64670\") "
Oct 03 15:06:36 crc kubenswrapper[4861]: I1003 15:06:36.833114 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/733b8bee-c808-4755-af18-850749e64670-utilities" (OuterVolumeSpecName: "utilities") pod "733b8bee-c808-4755-af18-850749e64670" (UID: "733b8bee-c808-4755-af18-850749e64670"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 03 15:06:36 crc kubenswrapper[4861]: I1003 15:06:36.839376 4861 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/733b8bee-c808-4755-af18-850749e64670-utilities\") on node \"crc\" DevicePath \"\""
Oct 03 15:06:36 crc kubenswrapper[4861]: I1003 15:06:36.840014 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/733b8bee-c808-4755-af18-850749e64670-kube-api-access-whdpx" (OuterVolumeSpecName: "kube-api-access-whdpx") pod "733b8bee-c808-4755-af18-850749e64670" (UID: "733b8bee-c808-4755-af18-850749e64670"). InnerVolumeSpecName "kube-api-access-whdpx". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 15:06:36 crc kubenswrapper[4861]: I1003 15:06:36.846435 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/733b8bee-c808-4755-af18-850749e64670-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "733b8bee-c808-4755-af18-850749e64670" (UID: "733b8bee-c808-4755-af18-850749e64670"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 03 15:06:36 crc kubenswrapper[4861]: I1003 15:06:36.941133 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-whdpx\" (UniqueName: \"kubernetes.io/projected/733b8bee-c808-4755-af18-850749e64670-kube-api-access-whdpx\") on node \"crc\" DevicePath \"\""
Oct 03 15:06:36 crc kubenswrapper[4861]: I1003 15:06:36.941360 4861 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/733b8bee-c808-4755-af18-850749e64670-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 03 15:06:37 crc kubenswrapper[4861]: I1003 15:06:37.184331 4861 generic.go:334] "Generic (PLEG): container finished" podID="733b8bee-c808-4755-af18-850749e64670" containerID="84067b8a1d4b11861cb059be908849f1cc3c3764d8394005efdb0c88cd5f09d8" exitCode=0
Oct 03 15:06:37 crc kubenswrapper[4861]: I1003 15:06:37.184380 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-klzvl" event={"ID":"733b8bee-c808-4755-af18-850749e64670","Type":"ContainerDied","Data":"84067b8a1d4b11861cb059be908849f1cc3c3764d8394005efdb0c88cd5f09d8"}
Oct 03 15:06:37 crc kubenswrapper[4861]: I1003 15:06:37.184421 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-klzvl"
Oct 03 15:06:37 crc kubenswrapper[4861]: I1003 15:06:37.184442 4861 scope.go:117] "RemoveContainer" containerID="84067b8a1d4b11861cb059be908849f1cc3c3764d8394005efdb0c88cd5f09d8"
Oct 03 15:06:37 crc kubenswrapper[4861]: I1003 15:06:37.184427 4861 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-klzvl" event={"ID":"733b8bee-c808-4755-af18-850749e64670","Type":"ContainerDied","Data":"5ee2615c4783c26a33131ea5346f370e7ca66ff549d7f6a209a24ff8fe5b967d"}
Oct 03 15:06:37 crc kubenswrapper[4861]: I1003 15:06:37.213139 4861 scope.go:117] "RemoveContainer" containerID="1a6914bb5003d7800cb1accac510297ee901a1f097e5e3f33068664efb460225"
Oct 03 15:06:37 crc kubenswrapper[4861]: I1003 15:06:37.243457 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-klzvl"]
Oct 03 15:06:37 crc kubenswrapper[4861]: I1003 15:06:37.250282 4861 scope.go:117] "RemoveContainer" containerID="50b204bfeb4aa1f1d08d08917d6a3191012b0792acf4fcd44cdb91d28ca2daef"
Oct 03 15:06:37 crc kubenswrapper[4861]: I1003 15:06:37.254366 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-klzvl"]
Oct 03 15:06:37 crc kubenswrapper[4861]: I1003 15:06:37.310957 4861 scope.go:117] "RemoveContainer" containerID="84067b8a1d4b11861cb059be908849f1cc3c3764d8394005efdb0c88cd5f09d8"
Oct 03 15:06:37 crc kubenswrapper[4861]: E1003 15:06:37.311375 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"84067b8a1d4b11861cb059be908849f1cc3c3764d8394005efdb0c88cd5f09d8\": container with ID starting with 84067b8a1d4b11861cb059be908849f1cc3c3764d8394005efdb0c88cd5f09d8 not found: ID does not exist" containerID="84067b8a1d4b11861cb059be908849f1cc3c3764d8394005efdb0c88cd5f09d8"
Oct 03 15:06:37 crc kubenswrapper[4861]: I1003 15:06:37.311405 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"84067b8a1d4b11861cb059be908849f1cc3c3764d8394005efdb0c88cd5f09d8"} err="failed to get container status \"84067b8a1d4b11861cb059be908849f1cc3c3764d8394005efdb0c88cd5f09d8\": rpc error: code = NotFound desc = could not find container \"84067b8a1d4b11861cb059be908849f1cc3c3764d8394005efdb0c88cd5f09d8\": container with ID starting with 84067b8a1d4b11861cb059be908849f1cc3c3764d8394005efdb0c88cd5f09d8 not found: ID does not exist"
Oct 03 15:06:37 crc kubenswrapper[4861]: I1003 15:06:37.311430 4861 scope.go:117] "RemoveContainer" containerID="1a6914bb5003d7800cb1accac510297ee901a1f097e5e3f33068664efb460225"
Oct 03 15:06:37 crc kubenswrapper[4861]: E1003 15:06:37.311736 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1a6914bb5003d7800cb1accac510297ee901a1f097e5e3f33068664efb460225\": container with ID starting with 1a6914bb5003d7800cb1accac510297ee901a1f097e5e3f33068664efb460225 not found: ID does not exist" containerID="1a6914bb5003d7800cb1accac510297ee901a1f097e5e3f33068664efb460225"
Oct 03 15:06:37 crc kubenswrapper[4861]: I1003 15:06:37.311826 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1a6914bb5003d7800cb1accac510297ee901a1f097e5e3f33068664efb460225"} err="failed to get container status \"1a6914bb5003d7800cb1accac510297ee901a1f097e5e3f33068664efb460225\": rpc error: code = NotFound desc = could not find container \"1a6914bb5003d7800cb1accac510297ee901a1f097e5e3f33068664efb460225\": container with ID starting with 1a6914bb5003d7800cb1accac510297ee901a1f097e5e3f33068664efb460225 not found: ID does not exist"
Oct 03 15:06:37 crc kubenswrapper[4861]: I1003 15:06:37.311891 4861 scope.go:117] "RemoveContainer" containerID="50b204bfeb4aa1f1d08d08917d6a3191012b0792acf4fcd44cdb91d28ca2daef"
Oct 03 15:06:37 crc kubenswrapper[4861]: E1003 15:06:37.312130 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"50b204bfeb4aa1f1d08d08917d6a3191012b0792acf4fcd44cdb91d28ca2daef\": container with ID starting with 50b204bfeb4aa1f1d08d08917d6a3191012b0792acf4fcd44cdb91d28ca2daef not found: ID does not exist" containerID="50b204bfeb4aa1f1d08d08917d6a3191012b0792acf4fcd44cdb91d28ca2daef"
Oct 03 15:06:37 crc kubenswrapper[4861]: I1003 15:06:37.312157 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"50b204bfeb4aa1f1d08d08917d6a3191012b0792acf4fcd44cdb91d28ca2daef"} err="failed to get container status \"50b204bfeb4aa1f1d08d08917d6a3191012b0792acf4fcd44cdb91d28ca2daef\": rpc error: code = NotFound desc = could not find container \"50b204bfeb4aa1f1d08d08917d6a3191012b0792acf4fcd44cdb91d28ca2daef\": container with ID starting with 50b204bfeb4aa1f1d08d08917d6a3191012b0792acf4fcd44cdb91d28ca2daef not found: ID does not exist"
Oct 03 15:06:38 crc kubenswrapper[4861]: I1003 15:06:38.705995 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="733b8bee-c808-4755-af18-850749e64670" path="/var/lib/kubelet/pods/733b8bee-c808-4755-af18-850749e64670/volumes"
Oct 03 15:06:46 crc kubenswrapper[4861]: I1003 15:06:46.299026 4861 generic.go:334] "Generic (PLEG): container finished" podID="5c7fbe42-081b-46c6-a12f-ce1c9e30020d" containerID="7a16c298bf544221597034843c0445197f8e8c270b6635faa09ce7669607e616" exitCode=0
event={"ID":"5c7fbe42-081b-46c6-a12f-ce1c9e30020d","Type":"ContainerDied","Data":"7a16c298bf544221597034843c0445197f8e8c270b6635faa09ce7669607e616"} Oct 03 15:06:46 crc kubenswrapper[4861]: I1003 15:06:46.300481 4861 scope.go:117] "RemoveContainer" containerID="7a16c298bf544221597034843c0445197f8e8c270b6635faa09ce7669607e616" Oct 03 15:06:46 crc kubenswrapper[4861]: I1003 15:06:46.883836 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-4tdkf_must-gather-xlw45_5c7fbe42-081b-46c6-a12f-ce1c9e30020d/gather/0.log" Oct 03 15:06:49 crc kubenswrapper[4861]: I1003 15:06:49.682351 4861 scope.go:117] "RemoveContainer" containerID="9558777aae4f5f6fe07fa47c538bc467cb19e05037fc658a37aed876cda97ecd" Oct 03 15:06:49 crc kubenswrapper[4861]: E1003 15:06:49.682693 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 15:07:00 crc kubenswrapper[4861]: I1003 15:07:00.923135 4861 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-4tdkf/must-gather-xlw45"] Oct 03 15:07:00 crc kubenswrapper[4861]: I1003 15:07:00.923856 4861 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-4tdkf/must-gather-xlw45" podUID="5c7fbe42-081b-46c6-a12f-ce1c9e30020d" containerName="copy" containerID="cri-o://8dff02f17b673be381071e31f2417a381dd957315bf7dd4fbe9aaab4559f7247" gracePeriod=2 Oct 03 15:07:00 crc kubenswrapper[4861]: I1003 15:07:00.941722 4861 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-4tdkf/must-gather-xlw45"] Oct 03 15:07:01 crc kubenswrapper[4861]: E1003 15:07:01.209033 4861 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5c7fbe42_081b_46c6_a12f_ce1c9e30020d.slice/crio-conmon-8dff02f17b673be381071e31f2417a381dd957315bf7dd4fbe9aaab4559f7247.scope\": RecentStats: unable to find data in memory cache]" Oct 03 15:07:01 crc kubenswrapper[4861]: I1003 15:07:01.377648 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-4tdkf_must-gather-xlw45_5c7fbe42-081b-46c6-a12f-ce1c9e30020d/copy/0.log" Oct 03 15:07:01 crc kubenswrapper[4861]: I1003 15:07:01.378601 4861 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-4tdkf/must-gather-xlw45" Oct 03 15:07:01 crc kubenswrapper[4861]: I1003 15:07:01.461540 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/5c7fbe42-081b-46c6-a12f-ce1c9e30020d-must-gather-output\") pod \"5c7fbe42-081b-46c6-a12f-ce1c9e30020d\" (UID: \"5c7fbe42-081b-46c6-a12f-ce1c9e30020d\") " Oct 03 15:07:01 crc kubenswrapper[4861]: I1003 15:07:01.462015 4861 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8dn7s\" (UniqueName: \"kubernetes.io/projected/5c7fbe42-081b-46c6-a12f-ce1c9e30020d-kube-api-access-8dn7s\") pod \"5c7fbe42-081b-46c6-a12f-ce1c9e30020d\" (UID: \"5c7fbe42-081b-46c6-a12f-ce1c9e30020d\") " Oct 03 15:07:01 crc kubenswrapper[4861]: I1003 15:07:01.466703 4861 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-4tdkf_must-gather-xlw45_5c7fbe42-081b-46c6-a12f-ce1c9e30020d/copy/0.log" Oct 03 15:07:01 crc kubenswrapper[4861]: I1003 15:07:01.467070 4861 generic.go:334] "Generic (PLEG): container finished" podID="5c7fbe42-081b-46c6-a12f-ce1c9e30020d" containerID="8dff02f17b673be381071e31f2417a381dd957315bf7dd4fbe9aaab4559f7247" exitCode=143 Oct 03 15:07:01 crc kubenswrapper[4861]: I1003 15:07:01.467142 4861 scope.go:117] "RemoveContainer" containerID="8dff02f17b673be381071e31f2417a381dd957315bf7dd4fbe9aaab4559f7247" Oct 03 15:07:01 crc kubenswrapper[4861]: I1003 15:07:01.467301 4861 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-4tdkf/must-gather-xlw45" Oct 03 15:07:01 crc kubenswrapper[4861]: I1003 15:07:01.479778 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5c7fbe42-081b-46c6-a12f-ce1c9e30020d-kube-api-access-8dn7s" (OuterVolumeSpecName: "kube-api-access-8dn7s") pod "5c7fbe42-081b-46c6-a12f-ce1c9e30020d" (UID: "5c7fbe42-081b-46c6-a12f-ce1c9e30020d"). InnerVolumeSpecName "kube-api-access-8dn7s". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:07:01 crc kubenswrapper[4861]: I1003 15:07:01.525956 4861 scope.go:117] "RemoveContainer" containerID="7a16c298bf544221597034843c0445197f8e8c270b6635faa09ce7669607e616" Oct 03 15:07:01 crc kubenswrapper[4861]: I1003 15:07:01.564685 4861 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8dn7s\" (UniqueName: \"kubernetes.io/projected/5c7fbe42-081b-46c6-a12f-ce1c9e30020d-kube-api-access-8dn7s\") on node \"crc\" DevicePath \"\"" Oct 03 15:07:01 crc kubenswrapper[4861]: I1003 15:07:01.578345 4861 scope.go:117] "RemoveContainer" containerID="8dff02f17b673be381071e31f2417a381dd957315bf7dd4fbe9aaab4559f7247" Oct 03 15:07:01 crc kubenswrapper[4861]: E1003 15:07:01.578801 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8dff02f17b673be381071e31f2417a381dd957315bf7dd4fbe9aaab4559f7247\": container with ID starting with 8dff02f17b673be381071e31f2417a381dd957315bf7dd4fbe9aaab4559f7247 not found: ID does not exist" containerID="8dff02f17b673be381071e31f2417a381dd957315bf7dd4fbe9aaab4559f7247" Oct 03 15:07:01 crc kubenswrapper[4861]: I1003 15:07:01.578852 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8dff02f17b673be381071e31f2417a381dd957315bf7dd4fbe9aaab4559f7247"} err="failed to get container status \"8dff02f17b673be381071e31f2417a381dd957315bf7dd4fbe9aaab4559f7247\": rpc error: code = NotFound desc = could not find container \"8dff02f17b673be381071e31f2417a381dd957315bf7dd4fbe9aaab4559f7247\": container with ID starting with 8dff02f17b673be381071e31f2417a381dd957315bf7dd4fbe9aaab4559f7247 not found: ID does not exist" Oct 03 15:07:01 crc kubenswrapper[4861]: I1003 15:07:01.578872 4861 scope.go:117] "RemoveContainer" containerID="7a16c298bf544221597034843c0445197f8e8c270b6635faa09ce7669607e616" Oct 03 15:07:01 crc kubenswrapper[4861]: E1003 15:07:01.579377 4861 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7a16c298bf544221597034843c0445197f8e8c270b6635faa09ce7669607e616\": container with ID starting with 7a16c298bf544221597034843c0445197f8e8c270b6635faa09ce7669607e616 not found: ID does not exist" containerID="7a16c298bf544221597034843c0445197f8e8c270b6635faa09ce7669607e616" Oct 03 15:07:01 crc kubenswrapper[4861]: I1003 15:07:01.579411 4861 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7a16c298bf544221597034843c0445197f8e8c270b6635faa09ce7669607e616"} err="failed to get container status \"7a16c298bf544221597034843c0445197f8e8c270b6635faa09ce7669607e616\": rpc error: code = NotFound desc = could not find container \"7a16c298bf544221597034843c0445197f8e8c270b6635faa09ce7669607e616\": container with ID starting with 7a16c298bf544221597034843c0445197f8e8c270b6635faa09ce7669607e616 not found: ID does not exist" Oct 03 15:07:01 crc kubenswrapper[4861]: I1003 15:07:01.666317 4861 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5c7fbe42-081b-46c6-a12f-ce1c9e30020d-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "5c7fbe42-081b-46c6-a12f-ce1c9e30020d" (UID: "5c7fbe42-081b-46c6-a12f-ce1c9e30020d"). InnerVolumeSpecName "must-gather-output". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 15:07:01 crc kubenswrapper[4861]: I1003 15:07:01.767674 4861 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/5c7fbe42-081b-46c6-a12f-ce1c9e30020d-must-gather-output\") on node \"crc\" DevicePath \"\"" Oct 03 15:07:02 crc kubenswrapper[4861]: I1003 15:07:02.696641 4861 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5c7fbe42-081b-46c6-a12f-ce1c9e30020d" path="/var/lib/kubelet/pods/5c7fbe42-081b-46c6-a12f-ce1c9e30020d/volumes" Oct 03 15:07:03 crc kubenswrapper[4861]: I1003 15:07:03.681731 4861 scope.go:117] "RemoveContainer" containerID="9558777aae4f5f6fe07fa47c538bc467cb19e05037fc658a37aed876cda97ecd" Oct 03 15:07:03 crc kubenswrapper[4861]: E1003 15:07:03.682523 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 15:07:15 crc kubenswrapper[4861]: I1003 15:07:15.681025 4861 scope.go:117] "RemoveContainer" containerID="9558777aae4f5f6fe07fa47c538bc467cb19e05037fc658a37aed876cda97ecd" Oct 03 15:07:15 crc kubenswrapper[4861]: E1003 15:07:15.681748 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 15:07:29 crc kubenswrapper[4861]: I1003 15:07:29.680679 4861 scope.go:117] "RemoveContainer" containerID="9558777aae4f5f6fe07fa47c538bc467cb19e05037fc658a37aed876cda97ecd" Oct 03 15:07:29 crc kubenswrapper[4861]: E1003 15:07:29.682587 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 15:07:40 crc kubenswrapper[4861]: I1003 15:07:40.681405 4861 scope.go:117] "RemoveContainer" containerID="9558777aae4f5f6fe07fa47c538bc467cb19e05037fc658a37aed876cda97ecd" Oct 03 15:07:40 crc kubenswrapper[4861]: E1003 15:07:40.682519 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348" Oct 03 15:07:51 crc kubenswrapper[4861]: I1003 15:07:51.681852 4861 scope.go:117] "RemoveContainer" containerID="9558777aae4f5f6fe07fa47c538bc467cb19e05037fc658a37aed876cda97ecd" Oct 03 15:07:51 crc kubenswrapper[4861]: E1003 15:07:51.682667 4861 pod_workers.go:1301] "Error syncing 
Oct 03 15:07:51 crc kubenswrapper[4861]: E1003 15:07:51.682667 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348"
Oct 03 15:08:05 crc kubenswrapper[4861]: I1003 15:08:05.681399 4861 scope.go:117] "RemoveContainer" containerID="9558777aae4f5f6fe07fa47c538bc467cb19e05037fc658a37aed876cda97ecd"
Oct 03 15:08:05 crc kubenswrapper[4861]: E1003 15:08:05.682310 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348"
Oct 03 15:08:16 crc kubenswrapper[4861]: I1003 15:08:16.696158 4861 scope.go:117] "RemoveContainer" containerID="9558777aae4f5f6fe07fa47c538bc467cb19e05037fc658a37aed876cda97ecd"
Oct 03 15:08:16 crc kubenswrapper[4861]: E1003 15:08:16.697363 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348"
Oct 03 15:08:30 crc kubenswrapper[4861]: I1003 15:08:30.683415 4861 scope.go:117] "RemoveContainer" containerID="9558777aae4f5f6fe07fa47c538bc467cb19e05037fc658a37aed876cda97ecd"
Oct 03 15:08:30 crc kubenswrapper[4861]: E1003 15:08:30.684486 4861 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t9slw_openshift-machine-config-operator(d8335d3f-417e-4114-b306-a3d8f6c31348)\"" pod="openshift-machine-config-operator/machine-config-daemon-t9slw" podUID="d8335d3f-417e-4114-b306-a3d8f6c31348"